/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_mips.h"

#include "arch/mips/asm_support_mips.h"
#include "arch/mips/entrypoints_direct_mips.h"
#include "arch/mips/instruction_set_features_mips.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_mips.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips/assembler_mips.h"
#include "utils/stack_checks.h"

namespace art {
namespace mips {

41static constexpr int kCurrentMethodStackOffset = 0;
42static constexpr Register kMethodRegisterArgument = A0;
43
Alexey Frunze4147fcc2017-06-17 19:57:27 -070044// Flags controlling the use of thunks for Baker read barriers.
45constexpr bool kBakerReadBarrierThunksEnableForFields = true;
46constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
47constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
48
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020049Location MipsReturnLocation(Primitive::Type return_type) {
50 switch (return_type) {
51 case Primitive::kPrimBoolean:
52 case Primitive::kPrimByte:
53 case Primitive::kPrimChar:
54 case Primitive::kPrimShort:
55 case Primitive::kPrimInt:
56 case Primitive::kPrimNot:
57 return Location::RegisterLocation(V0);
58
59 case Primitive::kPrimLong:
60 return Location::RegisterPairLocation(V0, V1);
61
62 case Primitive::kPrimFloat:
63 case Primitive::kPrimDouble:
64 return Location::FpuRegisterLocation(F0);
65
66 case Primitive::kPrimVoid:
67 return Location();
68 }
69 UNREACHABLE();
70}
71
72Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
73 return MipsReturnLocation(type);
74}
75
76Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
77 return Location::RegisterLocation(kMethodRegisterArgument);
78}
79
80Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
81 Location next_location;
82
83 switch (type) {
84 case Primitive::kPrimBoolean:
85 case Primitive::kPrimByte:
86 case Primitive::kPrimChar:
87 case Primitive::kPrimShort:
88 case Primitive::kPrimInt:
89 case Primitive::kPrimNot: {
90 uint32_t gp_index = gp_index_++;
91 if (gp_index < calling_convention.GetNumberOfRegisters()) {
92 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
93 } else {
94 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
95 next_location = Location::StackSlot(stack_offset);
96 }
97 break;
98 }
99
100 case Primitive::kPrimLong: {
101 uint32_t gp_index = gp_index_;
102 gp_index_ += 2;
103 if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800104 Register reg = calling_convention.GetRegisterAt(gp_index);
105 if (reg == A1 || reg == A3) {
106 gp_index_++; // Skip A1(A3), and use A2_A3(T0_T1) instead.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200107 gp_index++;
108 }
109 Register low_even = calling_convention.GetRegisterAt(gp_index);
110 Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
111 DCHECK_EQ(low_even + 1, high_odd);
112 next_location = Location::RegisterPairLocation(low_even, high_odd);
113 } else {
114 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
115 next_location = Location::DoubleStackSlot(stack_offset);
116 }
117 break;
118 }
119
120 // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
121 // will take up the even/odd pair, while floats are stored in even regs only.
122 // On 64 bit FPU, both double and float are stored in even registers only.
123 case Primitive::kPrimFloat:
124 case Primitive::kPrimDouble: {
125 uint32_t float_index = float_index_++;
126 if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
127 next_location = Location::FpuRegisterLocation(
128 calling_convention.GetFpuRegisterAt(float_index));
129 } else {
130 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
131 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
132 : Location::StackSlot(stack_offset);
133 }
134 break;
135 }
136
137 case Primitive::kPrimVoid:
138 LOG(FATAL) << "Unexpected parameter type " << type;
139 break;
140 }
141
142 // Space on the stack is reserved for all arguments.
143 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
144
145 return next_location;
146}
147
148Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
149 return MipsReturnLocation(type);
150}
151
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200155
156class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
157 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000158 explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200159
160 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
161 LocationSummary* locations = instruction_->GetLocations();
162 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
163 __ Bind(GetEntryLabel());
164 if (instruction_->CanThrowIntoCatchBlock()) {
165 // Live registers will be restored in the catch block if caught.
166 SaveLiveRegisters(codegen, instruction_->GetLocations());
167 }
168 // We're moving two locations to locations that could overlap, so we need a parallel
169 // move resolver.
170 InvokeRuntimeCallingConvention calling_convention;
171 codegen->EmitParallelMoves(locations->InAt(0),
172 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
173 Primitive::kPrimInt,
174 locations->InAt(1),
175 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
176 Primitive::kPrimInt);
Serban Constantinescufca16662016-07-14 09:21:59 +0100177 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
178 ? kQuickThrowStringBounds
179 : kQuickThrowArrayBounds;
180 mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100181 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200182 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
183 }
184
185 bool IsFatal() const OVERRIDE { return true; }
186
187 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }
188
189 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200190 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
191};
192
193class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
194 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000195 explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200196
197 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
198 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
199 __ Bind(GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +0100200 mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200201 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
202 }
203
204 bool IsFatal() const OVERRIDE { return true; }
205
206 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }
207
208 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200209 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
210};
211
212class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
213 public:
214 LoadClassSlowPathMIPS(HLoadClass* cls,
215 HInstruction* at,
216 uint32_t dex_pc,
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700217 bool do_clinit,
218 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr)
219 : SlowPathCodeMIPS(at),
220 cls_(cls),
221 dex_pc_(dex_pc),
222 do_clinit_(do_clinit),
223 bss_info_high_(bss_info_high) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200224 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
225 }
226
227 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000228 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700229 Location out = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200230 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700231 const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700232 InvokeRuntimeCallingConvention calling_convention;
233 DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
234 const bool is_load_class_bss_entry =
235 (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200236 __ Bind(GetEntryLabel());
237 SaveLiveRegisters(codegen, locations);
238
Alexey Frunzec61c0762017-04-10 13:54:23 -0700239 // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
240 Register entry_address = kNoRegister;
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700241 if (is_load_class_bss_entry && baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700242 Register temp = locations->GetTemp(0).AsRegister<Register>();
243 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
244 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
245 // kSaveEverything call.
246 entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
247 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
248 if (temp_is_a0) {
249 __ Move(entry_address, temp);
250 }
251 }
252
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000253 dex::TypeIndex type_index = cls_->GetTypeIndex();
254 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
Serban Constantinescufca16662016-07-14 09:21:59 +0100255 QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
256 : kQuickInitializeType;
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000257 mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200258 if (do_clinit_) {
259 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
260 } else {
261 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
262 }
263
Alexey Frunzec61c0762017-04-10 13:54:23 -0700264 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700265 if (is_load_class_bss_entry && baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700266 // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700267 DCHECK(bss_info_high_);
268 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
269 mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
270 bool reordering = __ SetReorder(false);
271 __ Bind(&info_low->label);
272 __ StoreToOffset(kStoreWord,
273 calling_convention.GetRegisterAt(0),
274 entry_address,
275 /* placeholder */ 0x5678);
276 __ SetReorder(reordering);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700277 }
278
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200279 // Move the class to the desired location.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200280 if (out.IsValid()) {
281 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000282 Primitive::Type type = instruction_->GetType();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700283 mips_codegen->MoveLocation(out,
284 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
285 type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200286 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200287 RestoreLiveRegisters(codegen, locations);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700288
289 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700290 if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
291 // For non-Baker read barriers we need to re-calculate the address of
Alexey Frunzec61c0762017-04-10 13:54:23 -0700292 // the class entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700293 const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000294 Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700295 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko1998cd02017-01-13 13:02:58 +0000296 mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700297 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
298 mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
Alexey Frunze6b892cd2017-01-03 17:11:38 -0800299 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700300 mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
Alexey Frunze6b892cd2017-01-03 17:11:38 -0800301 __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
302 __ SetReorder(reordering);
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000303 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200304 __ B(GetExitLabel());
305 }
306
307 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }
308
309 private:
310 // The class this slow path will load.
311 HLoadClass* const cls_;
312
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200313 // The dex PC of `at_`.
314 const uint32_t dex_pc_;
315
316 // Whether to initialize the class.
317 const bool do_clinit_;
318
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700319 // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
320 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;
321
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200322 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
323};
324
325class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
326 public:
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700327 explicit LoadStringSlowPathMIPS(HLoadString* instruction,
328 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high)
329 : SlowPathCodeMIPS(instruction), bss_info_high_(bss_info_high) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200330
331 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700332 DCHECK(instruction_->IsLoadString());
333 DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200334 LocationSummary* locations = instruction_->GetLocations();
335 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Alexey Frunzec61c0762017-04-10 13:54:23 -0700336 HLoadString* load = instruction_->AsLoadString();
337 const dex::StringIndex string_index = load->GetStringIndex();
338 Register out = locations->Out().AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200339 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700340 const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700341 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200342 __ Bind(GetEntryLabel());
343 SaveLiveRegisters(codegen, locations);
344
Alexey Frunzec61c0762017-04-10 13:54:23 -0700345 // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
346 Register entry_address = kNoRegister;
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700347 if (baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700348 Register temp = locations->GetTemp(0).AsRegister<Register>();
349 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
350 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
351 // kSaveEverything call.
352 entry_address = temp_is_a0 ? out : temp;
353 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
354 if (temp_is_a0) {
355 __ Move(entry_address, temp);
356 }
357 }
358
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000359 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
Serban Constantinescufca16662016-07-14 09:21:59 +0100360 mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200361 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700362
363 // Store the resolved string to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700364 if (baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700365 // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700366 DCHECK(bss_info_high_);
367 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
368 mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, bss_info_high_);
369 bool reordering = __ SetReorder(false);
370 __ Bind(&info_low->label);
371 __ StoreToOffset(kStoreWord,
372 calling_convention.GetRegisterAt(0),
373 entry_address,
374 /* placeholder */ 0x5678);
375 __ SetReorder(reordering);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700376 }
377
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200378 Primitive::Type type = instruction_->GetType();
379 mips_codegen->MoveLocation(locations->Out(),
Alexey Frunzec61c0762017-04-10 13:54:23 -0700380 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200381 type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200382 RestoreLiveRegisters(codegen, locations);
Vladimir Markoaad75c62016-10-03 08:46:48 +0000383
Alexey Frunzec61c0762017-04-10 13:54:23 -0700384 // Store the resolved string to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700385 if (!baker_or_no_read_barriers) {
386 // For non-Baker read barriers we need to re-calculate the address of
Alexey Frunzec61c0762017-04-10 13:54:23 -0700387 // the string entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700388 const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700389 Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700390 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Alexey Frunzec61c0762017-04-10 13:54:23 -0700391 mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700392 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
393 mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700394 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700395 mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700396 __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
397 __ SetReorder(reordering);
398 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200399 __ B(GetExitLabel());
400 }
401
402 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }
403
404 private:
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700405 // Pointer to the high half PC-relative patch info.
406 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;
407
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200408 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
409};
410
411class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
412 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000413 explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200414
415 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
416 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
417 __ Bind(GetEntryLabel());
418 if (instruction_->CanThrowIntoCatchBlock()) {
419 // Live registers will be restored in the catch block if caught.
420 SaveLiveRegisters(codegen, instruction_->GetLocations());
421 }
Serban Constantinescufca16662016-07-14 09:21:59 +0100422 mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200423 instruction_,
424 instruction_->GetDexPc(),
Serban Constantinescufca16662016-07-14 09:21:59 +0100425 this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200426 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
427 }
428
429 bool IsFatal() const OVERRIDE { return true; }
430
431 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }
432
433 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200434 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
435};
436
437class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
438 public:
439 SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000440 : SlowPathCodeMIPS(instruction), successor_(successor) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200441
442 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Lena Djokicca8c2952017-05-29 11:31:46 +0200443 LocationSummary* locations = instruction_->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200444 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
445 __ Bind(GetEntryLabel());
Lena Djokicca8c2952017-05-29 11:31:46 +0200446 SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
Serban Constantinescufca16662016-07-14 09:21:59 +0100447 mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200448 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Lena Djokicca8c2952017-05-29 11:31:46 +0200449 RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200450 if (successor_ == nullptr) {
451 __ B(GetReturnLabel());
452 } else {
453 __ B(mips_codegen->GetLabelOf(successor_));
454 }
455 }
456
457 MipsLabel* GetReturnLabel() {
458 DCHECK(successor_ == nullptr);
459 return &return_label_;
460 }
461
462 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }
463
464 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200465 // If not null, the block to branch to after the suspend check.
466 HBasicBlock* const successor_;
467
468 // If `successor_` is null, the label to branch to after the suspend check.
469 MipsLabel return_label_;
470
471 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
472};
473
474class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
475 public:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800476 explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
477 : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200478
479 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
480 LocationSummary* locations = instruction_->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200481 uint32_t dex_pc = instruction_->GetDexPc();
482 DCHECK(instruction_->IsCheckCast()
483 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
484 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
485
486 __ Bind(GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800487 if (!is_fatal_) {
488 SaveLiveRegisters(codegen, locations);
489 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200490
491 // We're moving two locations to locations that could overlap, so we need a parallel
492 // move resolver.
493 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800494 codegen->EmitParallelMoves(locations->InAt(0),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200495 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
496 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800497 locations->InAt(1),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200498 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
499 Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200500 if (instruction_->IsInstanceOf()) {
Serban Constantinescufca16662016-07-14 09:21:59 +0100501 mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800502 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200503 Primitive::Type ret_type = instruction_->GetType();
504 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
505 mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200506 } else {
507 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800508 mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
509 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200510 }
511
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800512 if (!is_fatal_) {
513 RestoreLiveRegisters(codegen, locations);
514 __ B(GetExitLabel());
515 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200516 }
517
518 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }
519
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800520 bool IsFatal() const OVERRIDE { return is_fatal_; }
521
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200522 private:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800523 const bool is_fatal_;
524
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200525 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
526};
527
528class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
529 public:
Aart Bik42249c32016-01-07 15:33:50 -0800530 explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000531 : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200532
533 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800534 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200535 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100536 LocationSummary* locations = instruction_->GetLocations();
537 SaveLiveRegisters(codegen, locations);
538 InvokeRuntimeCallingConvention calling_convention;
539 __ LoadConst32(calling_convention.GetRegisterAt(0),
540 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufca16662016-07-14 09:21:59 +0100541 mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100542 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200543 }
544
545 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }
546
547 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200548 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
549};
550
Alexey Frunze15958152017-02-09 19:08:30 -0800551class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
552 public:
553 explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}
554
555 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
556 LocationSummary* locations = instruction_->GetLocations();
557 __ Bind(GetEntryLabel());
558 SaveLiveRegisters(codegen, locations);
559
560 InvokeRuntimeCallingConvention calling_convention;
561 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
562 parallel_move.AddMove(
563 locations->InAt(0),
564 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
565 Primitive::kPrimNot,
566 nullptr);
567 parallel_move.AddMove(
568 locations->InAt(1),
569 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
570 Primitive::kPrimInt,
571 nullptr);
572 parallel_move.AddMove(
573 locations->InAt(2),
574 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
575 Primitive::kPrimNot,
576 nullptr);
577 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
578
579 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
580 mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
581 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
582 RestoreLiveRegisters(codegen, locations);
583 __ B(GetExitLabel());
584 }
585
586 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }
587
588 private:
589 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
590};
591
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `ref` must be the register holding the reference to mark; `entrypoint`,
  // when valid, is a register already holding the mark entry point (GC root
  // case, see the class comment above).
  ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
                              Location ref,
                              Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only ever attached to instructions that can embed a
    // reference load needing a read barrier.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // Only registers with a dedicated ReadBarrierMarkRegX entry point are
    // allowed here (ZERO, AT, TMP-class and other reserved registers are
    // excluded by the ranges below).
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      // The entry point is already loaded into a register (GC root case);
      // call it directly. The DCHECK pins it to T9, the register MIPS PIC
      // code conventionally uses for indirect calls.
      mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
      __ Jalr(entrypoint_.AsRegister<Register>());
      __ NopIfNoReordering();
    } else {
      // The per-register entry points are laid out consecutively in the
      // Thread structure; `ref_reg - 1` presumably skips ZERO, which has no
      // entry point — see Thread::ReadBarrierMarkEntryPointsOffset.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this,
                                                        /* direct */ false);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
};
684
685// Slow path marking an object reference `ref` during a read barrier,
686// and if needed, atomically updating the field `obj.field` in the
687// object `obj` holding this reference after marking (contrary to
688// ReadBarrierMarkSlowPathMIPS above, which never tries to update
689// `obj.field`).
690//
691// This means that after the execution of this slow path, both `ref`
692// and `obj.field` will be up-to-date; i.e., after the flip, both will
693// hold the same to-space reference (unless another thread installed
694// another object reference (different from `ref`) in `obj.field`).
695class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
696 public:
697 ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
698 Location ref,
699 Register obj,
700 Location field_offset,
701 Register temp1)
702 : SlowPathCodeMIPS(instruction),
703 ref_(ref),
704 obj_(obj),
705 field_offset_(field_offset),
706 temp1_(temp1) {
707 DCHECK(kEmitCompilerReadBarrier);
708 }
709
710 const char* GetDescription() const OVERRIDE {
711 return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
712 }
713
714 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
715 LocationSummary* locations = instruction_->GetLocations();
716 Register ref_reg = ref_.AsRegister<Register>();
717 DCHECK(locations->CanCall());
718 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
719 // This slow path is only used by the UnsafeCASObject intrinsic.
720 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
721 << "Unexpected instruction in read barrier marking and field updating slow path: "
722 << instruction_->DebugName();
723 DCHECK(instruction_->GetLocations()->Intrinsified());
724 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
725 DCHECK(field_offset_.IsRegisterPair()) << field_offset_;
726
727 __ Bind(GetEntryLabel());
728
729 // Save the old reference.
730 // Note that we cannot use AT or TMP to save the old reference, as those
731 // are used by the code that follows, but we need the old reference after
732 // the call to the ReadBarrierMarkRegX entry point.
733 DCHECK_NE(temp1_, AT);
734 DCHECK_NE(temp1_, TMP);
735 __ Move(temp1_, ref_reg);
736
737 // No need to save live registers; it's taken care of by the
738 // entrypoint. Also, there is no need to update the stack mask,
739 // as this runtime call will not trigger a garbage collection.
740 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
741 DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
742 (S2 <= ref_reg && ref_reg <= S7) ||
743 (ref_reg == FP)) << ref_reg;
744 // "Compact" slow path, saving two moves.
745 //
746 // Instead of using the standard runtime calling convention (input
747 // and output in A0 and V0 respectively):
748 //
749 // A0 <- ref
750 // V0 <- ReadBarrierMark(A0)
751 // ref <- V0
752 //
753 // we just use rX (the register containing `ref`) as input and output
754 // of a dedicated entrypoint:
755 //
756 // rX <- ReadBarrierMarkRegX(rX)
757 //
758 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100759 Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800760 // This runtime call does not require a stack map.
761 mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
762 instruction_,
763 this,
764 /* direct */ false);
765
766 // If the new reference is different from the old reference,
767 // update the field in the holder (`*(obj_ + field_offset_)`).
768 //
769 // Note that this field could also hold a different object, if
770 // another thread had concurrently changed it. In that case, the
771 // the compare-and-set (CAS) loop below would abort, leaving the
772 // field as-is.
773 MipsLabel done;
774 __ Beq(temp1_, ref_reg, &done);
775
776 // Update the the holder's field atomically. This may fail if
777 // mutator updates before us, but it's OK. This is achieved
778 // using a strong compare-and-set (CAS) operation with relaxed
779 // memory synchronization ordering, where the expected value is
780 // the old reference and the desired value is the new reference.
781
782 // Convenience aliases.
783 Register base = obj_;
784 // The UnsafeCASObject intrinsic uses a register pair as field
785 // offset ("long offset"), of which only the low part contains
786 // data.
787 Register offset = field_offset_.AsRegisterPairLow<Register>();
788 Register expected = temp1_;
789 Register value = ref_reg;
790 Register tmp_ptr = TMP; // Pointer to actual memory.
791 Register tmp = AT; // Value in memory.
792
793 __ Addu(tmp_ptr, base, offset);
794
795 if (kPoisonHeapReferences) {
796 __ PoisonHeapReference(expected);
797 // Do not poison `value` if it is the same register as
798 // `expected`, which has just been poisoned.
799 if (value != expected) {
800 __ PoisonHeapReference(value);
801 }
802 }
803
804 // do {
805 // tmp = [r_ptr] - expected;
806 // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
807
808 bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
809 MipsLabel loop_head, exit_loop;
810 __ Bind(&loop_head);
811 if (is_r6) {
812 __ LlR6(tmp, tmp_ptr);
813 } else {
814 __ LlR2(tmp, tmp_ptr);
815 }
816 __ Bne(tmp, expected, &exit_loop);
817 __ Move(tmp, value);
818 if (is_r6) {
819 __ ScR6(tmp, tmp_ptr);
820 } else {
821 __ ScR2(tmp, tmp_ptr);
822 }
823 __ Beqz(tmp, &loop_head);
824 __ Bind(&exit_loop);
825
826 if (kPoisonHeapReferences) {
827 __ UnpoisonHeapReference(expected);
828 // Do not unpoison `value` if it is the same register as
829 // `expected`, which has just been unpoisoned.
830 if (value != expected) {
831 __ UnpoisonHeapReference(value);
832 }
833 }
834
835 __ Bind(&done);
836 __ B(GetExitLabel());
837 }
838
839 private:
840 // The location (register) of the marked object reference.
841 const Location ref_;
842 // The register containing the object holding the marked object reference field.
843 const Register obj_;
844 // The location of the offset of the marked reference field within `obj_`.
845 Location field_offset_;
846
847 const Register temp1_;
848
849 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
850};
851
852// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `out` receives the result of the read barrier; `ref` is the reference
  // loaded from the heap; `obj` holds the object containing the reference;
  // `offset`/`index` describe where within `obj` the reference lives.
  ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
                                          Location out,
                                          Location ref,
                                          Location obj,
                                          uint32_t offset,
                                          Location index)
      : SlowPathCodeMIPS(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips::MipsAssembler::Sll and
          // art::mips::MipsAssembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: pass the constant offset as the third argument directly.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }

 private:
  // Picks a caller-save core register different from `ref_` and `obj_` that
  // can be clobbered freely inside this slow path.
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    // NOTE(review): static_cast<int> stored into size_t; harmless here since
    // register codes are small and non-negative, but static_cast<size_t>
    // would be clearer.
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location the marked reference is written to.
  const Location out_;
  // The reference loaded from the heap (first argument to the runtime call).
  const Location ref_;
  // The object holding the reference (second argument to the runtime call).
  const Location obj_;
  // Static byte offset of the reference within `obj_` (0 when `index_` is used).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
};
1039
1040// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `out` receives the result; `root` is the location of the GC root to read.
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    // GC roots are only loaded by HLoadClass and HLoadString here.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root as the single argument, call the runtime, then move the
    // returned (marked) reference into `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               root_,
                               Primitive::kPrimNot);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  // Destination of the read barrier result.
  const Location out_;
  // Location of the GC root passed to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};
1086
// Constructor: hands the register counts and callee-save masks to the base
// CodeGenerator, then initializes the MIPS-specific builders, the parallel
// move resolver, the assembler, and the arena-allocated literal/patch tables.
CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
                                     const MipsInstructionSetFeatures& isa_features,
                                     const CompilerOptions& compiler_options,
                                     OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfFRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      clobbered_ra_(false) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1120
1121#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001122// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1123#define __ down_cast<MipsAssembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001124#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001125
// Finalizes code generation: resolves branches in the assembler, then remaps
// every recorded native pc (stack maps, disassembly intervals) from its
// pre-finalization offset to the post-finalization one, since branch fixup
// may have grown the code.
void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches. Must happen before any call to
  // GetAdjustedPosition() below.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    // Fixup can only move code forward (insert instructions), never backward.
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}
1156
// Provides the resolver with the owning code generator's assembler.
MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
  return codegen_->GetAssembler();
}
1160
1161void ParallelMoveResolverMIPS::EmitMove(size_t index) {
1162 DCHECK_LT(index, moves_.size());
1163 MoveOperands* move = moves_[index];
1164 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1165}
1166
// Emits code swapping the contents of the two locations of move `index`,
// using TMP/AT/FTMP as scratch. Covers GPR, FPR, GPR pair, stack slot and
// double stack slot combinations; anything else aborts.
void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  Primitive::Type type = move->GetType();
  Location loc1 = move->GetDestination();
  Location loc2 = move->GetSource();

  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  if (loc1.IsRegister() && loc2.IsRegister()) {
    // Swap 2 GPRs.
    Register r1 = loc1.AsRegister<Register>();
    Register r2 = loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
    // Swap 2 FPRs, single- or double-precision depending on `type`.
    FRegister f1 = loc1.AsFpuRegister<FRegister>();
    FRegister f2 = loc2.AsFpuRegister<FRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, f2);
      __ MovS(f2, f1);
      __ MovS(f1, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, f2);
      __ MovD(f2, f1);
      __ MovD(f1, FTMP);
    }
  } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegister())) {
    // Swap FPR and GPR.
    DCHECK_EQ(type, Primitive::kPrimFloat);  // Can only swap a float.
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Mfc1(r2, f1);
    __ Mtc1(TMP, f1);
  } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
    // Swap 2 GPR register pairs.
    Register r1 = loc1.AsRegisterPairLow<Register>();
    Register r2 = loc2.AsRegisterPairLow<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
    r1 = loc1.AsRegisterPairHigh<Register>();
    r2 = loc2.AsRegisterPairHigh<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
    // Swap FPR and GPR register pair.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                          : loc2.AsRegisterPairLow<Register>();
    Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                          : loc2.AsRegisterPairHigh<Register>();
    // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
    // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
    // unpredictable and the following mfch1 will fail.
    __ Mfc1(TMP, f1);
    __ MoveFromFpuHigh(AT, f1);
    __ Mtc1(r2_l, f1);
    __ MoveToFpuHigh(r2_h, f1);
    __ Move(r2_l, TMP);
    __ Move(r2_h, AT);
  } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
    // Swap two 32-bit stack slots; see Exchange() below.
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
  } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
    // Swap two 64-bit stack slots; see Exchange() below.
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
  } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
             (loc1.IsStackSlot() && loc2.IsRegister())) {
    // Swap GPR and 32-bit stack slot.
    Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    __ Move(TMP, reg);
    __ LoadFromOffset(kLoadWord, reg, SP, offset);
    __ StoreToOffset(kStoreWord, TMP, SP, offset);
  } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
             (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
    // Swap GPR register pair and 64-bit stack slot, one 32-bit half at a time.
    Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                           : loc2.AsRegisterPairLow<Register>();
    Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                           : loc2.AsRegisterPairHigh<Register>();
    intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
                                                 : loc2.GetHighStackIndex(kMipsWordSize);
    __ Move(TMP, reg_l);
    __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
    __ Move(TMP, reg_h);
    __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
  } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
    // Swap FPR and stack slot (single- or double-width per `type`).
    FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                         : loc2.AsFpuRegister<FRegister>();
    intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, reg);
      __ LoadSFromOffset(reg, SP, offset);
      __ StoreSToOffset(FTMP, SP, offset);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, reg);
      __ LoadDFromOffset(reg, SP, offset);
      __ StoreDToOffset(FTMP, SP, offset);
    }
  } else {
    LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
  }
}
1286
// Pops a scratch core register previously spilled by SpillScratch().
void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
1290
// Pushes a core register onto the stack so it can serve as a scratch register.
void ParallelMoveResolverMIPS::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
1294
// Swaps the contents of two stack slots at SP-relative offsets `index1` and
// `index2`; swaps two consecutive 32-bit words when `double_slot` is true.
void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
  for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
    // Load both words, then store them crosswise.
    __ LoadFromOffset(kLoadWord,
                      Register(ensure_scratch.GetRegister()),
                      SP,
                      index1 + stack_offset);
    __ LoadFromOffset(kLoadWord,
                      TMP,
                      SP,
                      index2 + stack_offset);
    __ StoreToOffset(kStoreWord,
                     Register(ensure_scratch.GetRegister()),
                     SP,
                     index2 + stack_offset);
    __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
  }
}
1318
// Computes the core and FPU spill masks as the intersection of the registers
// actually allocated with the callee-save sets.
void CodeGeneratorMIPS::ComputeSpillMask() {
  core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
  fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
  // If there're FPU callee-saved registers and there's an odd number of GPR callee-saved
  // registers, include the ZERO register to force alignment of FPU callee-saved registers
  // within the stack frame.
  if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
    core_spill_mask_ |= (1 << ZERO);
  }
}
1330
// Reports whether any callee-saved register needs spilling, additionally
// treating a clobbered RA as "allocated" so a frame gets created for it.
bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
  // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
  // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
  // into the path that creates a stack frame so that RA can be explicitly saved and restored.
  // RA can't otherwise be saved/restored when it's the only spilled register.
  return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
}
1338
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001339static dwarf::Reg DWARFReg(Register reg) {
1340 return dwarf::Reg::MipsCore(static_cast<int>(reg));
1341}
1342
1343// TODO: mapping of floating-point registers to DWARF.
1344
// Emit the method prologue: optional stack-overflow probe, frame allocation,
// callee-saved register spills (with CFI), the ArtMethod* slot, and the
// should-deoptimize flag.
void CodeGeneratorMIPS::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe the guard area below SP; the load's PC is recorded so a fault
    // here is attributable to this method (implicit stack-overflow check).
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    // An empty frame is only legal when nothing but RA would be spilled and
    // RA is not clobbered; there is then nothing to emit.
    CHECK_EQ(fpu_spill_mask_, 0u);
    CHECK_EQ(core_spill_mask_, 1u << RA);
    CHECK(!clobbered_ra_);
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core registers are stored top-down, most-significant register first.
  for (uint32_t mask = core_spill_mask_; mask != 0; ) {
    Register reg = static_cast<Register>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsWordSize;
    // The ZERO register is only included for alignment.
    if (reg != ZERO) {
      __ StoreToOffset(kStoreWord, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU registers are stored as 64-bit doublewords below the core registers.
  for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
    FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsDoublewordSize;
    __ StoreDToOffset(reg, SP, ofs);
    // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1406
// Emit the method epilogue: restore callee-saved registers, pop the frame
// (in the jr delay slot when the size fits a 16-bit immediate), and return.
// CFI state is saved/restored so code after the return keeps valid unwind info.
void CodeGeneratorMIPS::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (uint32_t mask = core_spill_mask_; mask != 0; ) {
      Register reg = static_cast<Register>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsWordSize;
      // The ZERO register is only included for alignment.
      if (reg != ZERO) {
        __ LoadFromOffset(kLoadWord, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
      FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsDoublewordSize;
      __ LoadDFromOffset(reg, SP, ofs);
      // TODO: __ cfi().Restore(DWARFReg(reg));
    }

    size_t frame_size = GetFrameSize();
    // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
    bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
    // Delay slots are filled manually below, so disable assembler reordering.
    bool reordering = __ SetReorder(false);
    if (exchange) {
      __ Jr(RA);
      __ DecreaseFrameSize(frame_size);  // Single instruction in delay slot.
    } else {
      __ DecreaseFrameSize(frame_size);
      __ Jr(RA);
      __ Nop();  // In delay slot.
    }
    __ SetReorder(reordering);
  } else {
    __ Jr(RA);
    __ NopIfNoReordering();
  }

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1455
1456void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1457 __ Bind(GetLabelOf(block));
1458}
1459
Lena Djokicca8c2952017-05-29 11:31:46 +02001460VectorRegister VectorRegisterFrom(Location location) {
1461 DCHECK(location.IsFpuRegister());
1462 return static_cast<VectorRegister>(location.AsFpuRegister<FRegister>());
1463}
1464
// Move a value of type `dst_type` from `source` to `destination`, covering
// every supported pairing of core registers, register pairs, FPU registers,
// SIMD slots and stack slots. No-op if the locations are equal.
void CodeGeneratorMIPS::MoveLocation(Location destination,
                                     Location source,
                                     Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  if (source.IsConstant()) {
    MoveConstant(destination, source.GetConstant());
  } else {
    if (destination.IsRegister()) {
      // 32-bit core register destination.
      if (source.IsRegister()) {
        __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
      } else if (source.IsFpuRegister()) {
        __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
      } else {
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
      }
    } else if (destination.IsRegisterPair()) {
      // 64-bit value held in a pair of core registers.
      if (source.IsRegisterPair()) {
        __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
        __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      } else if (source.IsFpuRegister()) {
        Register dst_high = destination.AsRegisterPairHigh<Register>();
        Register dst_low = destination.AsRegisterPairLow<Register>();
        FRegister src = source.AsFpuRegister<FRegister>();
        __ Mfc1(dst_low, src);
        __ MoveFromFpuHigh(dst_high, src);
      } else {
        DCHECK(source.IsDoubleStackSlot())
            << "Cannot move from " << source << " to " << destination;
        int32_t off = source.GetStackIndex();
        Register r = destination.AsRegisterPairLow<Register>();
        __ LoadFromOffset(kLoadDoubleword, r, SP, off);
      }
    } else if (destination.IsFpuRegister()) {
      if (source.IsRegister()) {
        DCHECK(!Primitive::Is64BitType(dst_type));
        __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
      } else if (source.IsRegisterPair()) {
        DCHECK(Primitive::Is64BitType(dst_type));
        FRegister dst = destination.AsFpuRegister<FRegister>();
        Register src_high = source.AsRegisterPairHigh<Register>();
        Register src_low = source.AsRegisterPairLow<Register>();
        __ Mtc1(src_low, dst);
        __ MoveToFpuHigh(src_high, dst);
      } else if (source.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          // With SIMD, FPU registers hold full vectors; move the whole vector.
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          if (Primitive::Is64BitType(dst_type)) {
            __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
          } else {
            DCHECK_EQ(dst_type, Primitive::kPrimFloat);
            __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
          }
        }
      } else if (source.IsSIMDStackSlot()) {
        __ LoadQFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      } else if (source.IsDoubleStackSlot()) {
        DCHECK(Primitive::Is64BitType(dst_type));
        __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      } else {
        DCHECK(!Primitive::Is64BitType(dst_type));
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      }
    } else if (destination.IsSIMDStackSlot()) {
      if (source.IsFpuRegister()) {
        __ StoreQToOffset(source.AsFpuRegister<FRegister>(), SP, destination.GetStackIndex());
      } else {
        // Stack-to-stack vector move goes through the FPU temporary.
        DCHECK(source.IsSIMDStackSlot());
        __ LoadQFromOffset(FTMP, SP, source.GetStackIndex());
        __ StoreQToOffset(FTMP, SP, destination.GetStackIndex());
      }
    } else if (destination.IsDoubleStackSlot()) {
      int32_t dst_offset = destination.GetStackIndex();
      if (source.IsRegisterPair()) {
        __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, dst_offset);
      } else if (source.IsFpuRegister()) {
        __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
      } else {
        // Stack-to-stack 64-bit move: copy two words through TMP.
        DCHECK(source.IsDoubleStackSlot())
            << "Cannot move from " << source << " to " << destination;
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset + 4);
      }
    } else {
      DCHECK(destination.IsStackSlot()) << destination;
      int32_t dst_offset = destination.GetStackIndex();
      if (source.IsRegister()) {
        __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, dst_offset);
      } else if (source.IsFpuRegister()) {
        __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
      } else {
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
      }
    }
  }
}
1571
// Materialize the HIR constant `c` (int/null, long, float or double) into
// `destination`, which may be a register, a register pair, an FPU register,
// or a (double) stack slot.
void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
  if (c->IsIntConstant() || c->IsNullConstant()) {
    // Move 32 bit constant.
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsRegister()) {
      Register dst = destination.AsRegister<Register>();
      __ LoadConst32(dst, value);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsLongConstant()) {
    // Move 64 bit constant.
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsRegisterPair()) {
      Register r_h = destination.AsRegisterPairHigh<Register>();
      Register r_l = destination.AsRegisterPairLow<Register>();
      __ LoadConst64(r_h, r_l, value);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsFloatConstant()) {
    // Move 32 bit float constant.
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsFpuRegister()) {
      __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else {
    // Move 64 bit double constant.
    DCHECK(c->IsDoubleConstant()) << c->DebugName();
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsFpuRegister()) {
      FRegister fd = destination.AsFpuRegister<FRegister>();
      __ LoadDConst64(fd, value, TMP);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  }
}
1620
1621void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1622 DCHECK(destination.IsRegister());
1623 Register dst = destination.AsRegister<Register>();
1624 __ LoadConst32(dst, value);
1625}
1626
// Register `location` (a single register or a register pair) as temp(s) in
// `locations` so the register allocator keeps it available.
void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else if (location.IsRegisterPair()) {
    // A pair contributes two independent temps, low then high.
    locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
    locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}
1637
// Convert each recorded PC-relative patch site into a LinkerPatch created by
// `Factory`, resolving the anchor (pc_rel) offset from the high-half patch.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
    // the assembler's base label used for PC-relative addressing.
    // Low-half patches borrow the anchor from their high-half counterpart.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = info_high.pc_rel_label.IsBound()
        ? __ GetLabelLocation(&info_high.pc_rel_label)
        : __ GetPcRelBaseLabelLocation();
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1656
// Collect all recorded method/type/string patches into `linker_patches`.
// Boot-image compilations emit relative patches; otherwise string patches go
// through the .bss entry path. The final size is checked against the reserve.
void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
                                                                  linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    // Outside the boot image, direct method/type references are not allowed.
    DCHECK(pc_relative_method_patches_.empty());
    DCHECK(pc_relative_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
                                                                linker_patches);
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1685
// Record a PC-relative patch for a boot-image method reference.
// `info_high` links a low-half patch to its high-half counterpart.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.dex_method_index,
                            info_high,
                            &pc_relative_method_patches_);
}
1694
// Record a PC-relative patch for a method's .bss entry.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewMethodBssEntryPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.dex_method_index,
                            info_high,
                            &method_bss_entry_patches_);
}
1703
// Record a PC-relative patch for a boot-image type reference.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
}
1710
// Record a PC-relative patch for a type's .bss entry.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
}
1717
// Record a PC-relative patch for a string reference.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}
1724
// Append a new patch record to `patches` and return a pointer to it.
// Pointer stability relies on ArenaDeque not relocating elements.
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
    const DexFile& dex_file,
    uint32_t offset_or_index,
    const PcRelativePatchInfo* info_high,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index, info_high);
  return &patches->back();
}
1733
// Return the cached assembler literal for `value`, creating it on first use.
Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}
1739
// Deduplicate a boot-image address as a 32-bit literal.
Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
}
1743
// Emit the high half of a PC-relative address into `out` with placeholder
// immediates that the linker later patches (R6: auipc; R2: nal/lui/addu or
// lui/addu against `base`). Binds the patch labels recorded in `info_high`
// and, when given, `info_low` (which must reference `info_high`).
void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                             Register out,
                                                             Register base,
                                                             PcRelativePatchInfo* info_low) {
  DCHECK(!info_high->patch_info_high);
  DCHECK_NE(out, base);
  if (GetInstructionSetFeatures().IsR6()) {
    DCHECK_EQ(base, ZERO);
    __ Bind(&info_high->label);
    __ Bind(&info_high->pc_rel_label);
    // Add the high half of a 32-bit offset to PC.
    __ Auipc(out, /* placeholder */ 0x1234);
  } else {
    // If base is ZERO, emit NAL to obtain the actual base.
    if (base == ZERO) {
      // Generate a dummy PC-relative call to obtain PC.
      __ Nal();
    }
    __ Bind(&info_high->label);
    __ Lui(out, /* placeholder */ 0x1234);
    // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
    // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
    if (base == ZERO) {
      __ Bind(&info_high->pc_rel_label);
    }
    // Add the high half of a 32-bit offset to PC.
    __ Addu(out, out, (base == ZERO) ? RA : base);
  }
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. lw, jialc, addiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1779
// Record a JIT root-table entry for `handle` and create the code patch that
// will be resolved against it in PatchJitRootUse().
CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
    const DexFile& dex_file,
    dex::StringIndex dex_index,
    Handle<mirror::String> handle) {
  jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
                              reinterpret_cast64<uint64_t>(handle.GetReference()));
  jit_string_patches_.emplace_back(dex_file, dex_index.index_);
  return &jit_string_patches_.back();
}
1789
// Same as NewJitRootStringPatch, but for class roots.
CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
    const DexFile& dex_file,
    dex::TypeIndex dex_index,
    Handle<mirror::Class> handle) {
  jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
                             reinterpret_cast64<uint64_t>(handle.GetReference()));
  jit_class_patches_.emplace_back(dex_file, dex_index.index_);
  return &jit_class_patches_.back();
}
1799
// Patch a JIT root access in the emitted code: the 32-bit address of the
// root-table slot is written over the 0x12345678 placeholder split across a
// "lui" (high half) and a following low-half instruction. The DCHECKs verify
// the little-endian placeholder encoding before the bytes are overwritten.
void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
                                        const uint8_t* roots_data,
                                        const CodeGeneratorMIPS::JitPatchInfo& info,
                                        uint64_t index_in_table) const {
  uint32_t high_literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
  uint32_t low_literal_offset = GetAssembler().GetLabelLocation(&info.low_label);
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
  // lui reg, addr32_high
  DCHECK_EQ(code[high_literal_offset + 0], 0x34);
  DCHECK_EQ(code[high_literal_offset + 1], 0x12);
  DCHECK_EQ((code[high_literal_offset + 2] & 0xE0), 0x00);
  DCHECK_EQ(code[high_literal_offset + 3], 0x3C);
  // instr reg, reg, addr32_low
  DCHECK_EQ(code[low_literal_offset + 0], 0x78);
  DCHECK_EQ(code[low_literal_offset + 1], 0x56);
  addr32 += (addr32 & 0x8000) << 1;  // Account for sign extension in "instr reg, reg, addr32_low".
  // lui reg, addr32_high
  code[high_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
  code[high_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
  // instr reg, reg, addr32_low
  code[low_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 0);
  code[low_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 8);
}
1825
// Resolve all recorded JIT string/class patches against the root table
// located at `roots_data`.
void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const JitPatchInfo& info : jit_string_patches_) {
    const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
                                                           dex::StringIndex(info.index)));
    DCHECK(it != jit_string_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }
  for (const JitPatchInfo& info : jit_class_patches_) {
    const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
                                                        dex::TypeIndex(info.index)));
    DCHECK(it != jit_class_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, info, index_in_table);
  }
}
1842
// GC write barrier: mark the card covering `object` after `value` was stored
// into it. Skipped when `value` is null (if `value_can_be_null`). Uses the
// AT and TMP scratch registers.
void CodeGeneratorMIPS::MarkGCCard(Register object,
                                   Register value,
                                   bool value_can_be_null) {
  MipsLabel done;
  Register card = AT;
  Register temp = TMP;
  if (value_can_be_null) {
    __ Beqz(value, &done);
  }
  // Load the card-table biased base from the thread register.
  __ LoadFromOffset(kLoadWord,
                    card,
                    TR,
                    Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
  // card + (object >> kCardShift) addresses the card; store the base's low
  // byte into it to mark it dirty.
  __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Addu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1863
// Mark registers that the register allocator must never hand out.
void CodeGeneratorMIPS::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT and TMP(T8) are used as temporary/scratch registers
  // (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  // Reserve odd-numbered FPU registers.
  for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
    blocked_fpu_registers_[i] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1900
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001901size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1902 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1903 return kMipsWordSize;
1904}
1905
1906size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1907 __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
1908 return kMipsWordSize;
1909}
1910
// Spill an FPU register: the full vector (quadword) when the graph uses SIMD,
// a doubleword otherwise. Returns the number of bytes used.
size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ StoreQToOffset(FRegister(reg_id), SP, stack_index);
  } else {
    __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
  }
  return GetFloatingPointSpillSlotSize();
}
1919
// Reload an FPU register spilled by SaveFloatingPointRegister(); quadword
// under SIMD, doubleword otherwise. Returns the number of bytes consumed.
size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ LoadQFromOffset(FRegister(reg_id), SP, stack_index);
  } else {
    __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
  }
  return GetFloatingPointSpillSlotSize();
}
1928
// Prints the symbolic name of a core register (for debug dumps).
void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}
1932
// Prints the symbolic name of a floating-point register (for debug dumps).
void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FRegister(reg);
}
1936
Serban Constantinescufca16662016-07-14 09:21:59 +01001937constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1938
// Emits a call to the given quick runtime entrypoint and, when the entrypoint
// requires one, records a stack map at `dex_pc` (associated with `slow_path` if any).
void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                      HInstruction* instruction,
                                      uint32_t dex_pc,
                                      SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // The entrypoint is loaded from the Thread object at its per-entrypoint offset;
  // "direct" entrypoints need extra outgoing-argument space (see GenerateInvokeRuntime).
  GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
                        IsDirectEntrypoint(entrypoint));
  if (EntrypointRequiresStackMap(entrypoint)) {
    // Must come after the call so the recorded PC points at the return address.
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1950
// Emits a runtime call by raw entrypoint offset without recording a stack map.
// Only valid for calls that cannot trigger GC/deoptimization walks at this PC
// (validated by ValidateInvokeRuntimeWithoutRecordingPcInfo).
void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                            HInstruction* instruction,
                                                            SlowPathCode* slow_path,
                                                            bool direct) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset, direct);
}
1958
// Emits the actual call sequence: load the entrypoint pointer from the Thread
// register (TR) into T9 and jump-and-link through it. Assembler reordering is
// disabled so we control the branch delay slot explicitly.
void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
  bool reordering = __ SetReorder(false);
  __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
  __ Jalr(T9);
  if (direct) {
    // Reserve argument space on stack (for $a0-$a3) for
    // entrypoints that directly reference native implementations.
    // Called function may use this space to store $a0-$a3 regs.
    __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);  // Single instruction in delay slot.
    __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
  } else {
    __ Nop();  // In delay slot.
  }
  __ SetReorder(reordering);
}
1974
// Emits a check that the class in `class_reg` is initialized; branches to
// `slow_path` (which performs/waits for initialization) when its status is
// below kStatusInitialized.
void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
                                                                    Register class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Blt(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1984
// Emits a full memory barrier; the barrier kind is ignored because MIPS SYNC
// stype 0 (a full barrier) is the only form emitted here.
void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}
1988
// Emits a thread-suspension test: loads the thread flags halfword from TR and,
// if any flag is set, enters the suspend slow path. With a known `successor`
// block the fast path branches straight to it; otherwise execution falls
// through via the slow path's return label.
void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                        HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
  if (successor == nullptr) {
    // Fall-through case: branch out only when a flag is set.
    __ Bnez(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Explicit-successor case: fast path jumps to the successor directly.
    __ Beqz(TMP, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
2008
// Constructs the per-instruction code generator, caching the shared assembler
// and the owning CodeGeneratorMIPS for use during emission.
InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
                                                           CodeGeneratorMIPS* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
2014
2015void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
2016 DCHECK_EQ(instruction->InputCount(), 2U);
2017 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2018 Primitive::Type type = instruction->GetResultType();
2019 switch (type) {
2020 case Primitive::kPrimInt: {
2021 locations->SetInAt(0, Location::RequiresRegister());
2022 HInstruction* right = instruction->InputAt(1);
2023 bool can_use_imm = false;
2024 if (right->IsConstant()) {
2025 int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
2026 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
2027 can_use_imm = IsUint<16>(imm);
2028 } else if (instruction->IsAdd()) {
2029 can_use_imm = IsInt<16>(imm);
2030 } else {
2031 DCHECK(instruction->IsSub());
2032 can_use_imm = IsInt<16>(-imm);
2033 }
2034 }
2035 if (can_use_imm)
2036 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
2037 else
2038 locations->SetInAt(1, Location::RequiresRegister());
2039 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2040 break;
2041 }
2042
2043 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002044 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002045 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2046 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002047 break;
2048 }
2049
2050 case Primitive::kPrimFloat:
2051 case Primitive::kPrimDouble:
2052 DCHECK(instruction->IsAdd() || instruction->IsSub());
2053 locations->SetInAt(0, Location::RequiresFpuRegister());
2054 locations->SetInAt(1, Location::RequiresFpuRegister());
2055 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2056 break;
2057
2058 default:
2059 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
2060 }
2061}
2062
// Emits code for two-operand arithmetic/logic operations (ADD, SUB, AND, OR,
// XOR) for int, long, float and double results. Long operations operate on
// register pairs and synthesize the carry/borrow manually; long operations with
// constant operands specialize each 32-bit half to minimize instructions.
void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Location rhs_location = locations->InAt(1);

      Register rhs_reg = ZERO;
      int32_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<Register>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (use_imm)
          __ Addiu(dst, lhs, rhs_imm);
        else
          __ Addu(dst, lhs, rhs_reg);
      } else {
        DCHECK(instruction->IsSub());
        if (use_imm)
          // Subtraction of an immediate is addition of its negation
          // (the locations builder guaranteed -rhs_imm fits 16 bits).
          __ Addiu(dst, lhs, -rhs_imm);
        else
          __ Subu(dst, lhs, rhs_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      if (!use_imm) {
        Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
        Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
        if (instruction->IsAnd()) {
          __ And(dst_low, lhs_low, rhs_low);
          __ And(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsOr()) {
          __ Or(dst_low, lhs_low, rhs_low);
          __ Or(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsXor()) {
          __ Xor(dst_low, lhs_low, rhs_low);
          __ Xor(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsAdd()) {
          // MIPS has no add-with-carry; compute the carry (TMP) via unsigned
          // compares, being careful when the destination aliases an input.
          if (lhs_low == rhs_low) {
            // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
            __ Slt(TMP, lhs_low, ZERO);
            __ Addu(dst_low, lhs_low, rhs_low);
          } else {
            __ Addu(dst_low, lhs_low, rhs_low);
            // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
            __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
          }
          __ Addu(dst_high, lhs_high, rhs_high);
          __ Addu(dst_high, dst_high, TMP);
        } else {
          DCHECK(instruction->IsSub());
          // Borrow (TMP) must be computed before dst_low may clobber lhs_low.
          __ Sltu(TMP, lhs_low, rhs_low);
          __ Subu(dst_low, lhs_low, rhs_low);
          __ Subu(dst_high, lhs_high, rhs_high);
          __ Subu(dst_high, dst_high, TMP);
        }
      } else {
        // Constant right-hand side: handle each 32-bit half separately,
        // reusing TMP for halves that don't fit a 16-bit immediate and
        // skipping no-op moves.
        int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
        if (instruction->IsOr()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Ori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Or(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Ori(dst_high, lhs_high, high);
            }
          } else {
            if (high != low) {
              // TMP still holds `low` otherwise.
              __ LoadConst32(TMP, high);
            }
            __ Or(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsXor()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Xori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Xor(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Xori(dst_high, lhs_high, high);
            }
          } else {
            if (high != low) {
              // TMP still holds `low` otherwise.
              __ LoadConst32(TMP, high);
            }
            __ Xor(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsAnd()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            __ Andi(dst_low, lhs_low, low);
          } else if (low != 0xFFFFFFFF) {
            __ LoadConst32(TMP, low);
            __ And(dst_low, lhs_low, TMP);
          } else if (dst_low != lhs_low) {
            // AND with all-ones is the identity; just move if needed.
            __ Move(dst_low, lhs_low);
          }
          if (IsUint<16>(high)) {
            __ Andi(dst_high, lhs_high, high);
          } else if (high != 0xFFFFFFFF) {
            if (high != low) {
              // TMP still holds `low` otherwise.
              __ LoadConst32(TMP, high);
            }
            __ And(dst_high, lhs_high, TMP);
          } else if (dst_high != lhs_high) {
            __ Move(dst_high, lhs_high);
          }
        } else {
          // ADD/SUB: SUB is ADD of the negated constant.
          if (instruction->IsSub()) {
            value = -value;
          } else {
            DCHECK(instruction->IsAdd());
          }
          int32_t low = Low32Bits(value);
          int32_t high = High32Bits(value);
          if (IsInt<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Addiu(dst_low, lhs_low, low);
            }
            if (low != 0) {
              // Carry out of the low word (unsigned compare with the addend).
              __ Sltiu(AT, dst_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Addu(dst_low, lhs_low, TMP);
            __ Sltu(AT, dst_low, TMP);
          }
          if (IsInt<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Addiu(dst_high, lhs_high, high);
            }
          } else {
            if (high != low) {
              // TMP still holds `low` otherwise.
              __ LoadConst32(TMP, high);
            }
            __ Addu(dst_high, lhs_high, TMP);
          }
          if (low != 0) {
            // Propagate the carry computed above into the high word.
            __ Addu(dst_high, dst_high, AT);
          }
        }
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat) {
          __ AddS(dst, lhs, rhs);
        } else {
          __ AddD(dst, lhs, rhs);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimFloat) {
          __ SubS(dst, lhs, rhs);
        } else {
          __ SubD(dst, lhs, rhs);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2279
2280void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002281 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002282
2283 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2284 Primitive::Type type = instr->GetResultType();
2285 switch (type) {
2286 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002287 locations->SetInAt(0, Location::RequiresRegister());
2288 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2289 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2290 break;
2291 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002292 locations->SetInAt(0, Location::RequiresRegister());
2293 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2294 locations->SetOut(Location::RequiresRegister());
2295 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002296 default:
2297 LOG(FATAL) << "Unexpected shift type " << type;
2298 }
2299}
2300
2301static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2302
// Emits code for shift/rotate operations (SHL, SHR, USHR, ROR) on int and long.
// 64-bit values live in register pairs, so long shifts are built from 32-bit
// shifts plus cross-word bit transfers; MIPS32r2's INS/ROTR instructions are
// used when available.
void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  Location rhs_location = locations->InAt(1);
  bool use_imm = rhs_location.IsConstant();
  Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
  int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
  // Java masks shift distances to 31 (int) / 63 (long).
  const uint32_t shift_mask =
      (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
  const uint32_t shift_value = rhs_imm & shift_mask;
  // Are the INS (Insert Bit Field) and ROTR instructions supported?
  bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: at most a register move.
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (instr->IsShl()) {
          __ Sll(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Sra(dst, lhs, shift_value);
        } else if (instr->IsUShr()) {
          __ Srl(dst, lhs, shift_value);
        } else {
          // Rotate right; without ROTR, compose from SLL/SRL/OR.
          if (has_ins_rotr) {
            __ Rotr(dst, lhs, shift_value);
          } else {
            __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
            __ Srl(dst, lhs, shift_value);
            __ Or(dst, dst, TMP);
          }
        }
      } else {
        if (instr->IsShl()) {
          __ Sllv(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Srav(dst, lhs, rhs_reg);
        } else if (instr->IsUShr()) {
          __ Srlv(dst, lhs, rhs_reg);
        } else {
          if (has_ins_rotr) {
            __ Rotrv(dst, lhs, rhs_reg);
          } else {
            __ Subu(TMP, ZERO, rhs_reg);
            // 32-bit shift instructions use the 5 least significant bits of the shift count, so
            // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
            // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
            // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
            // IOW, the OR'd values are equal.
            __ Sllv(TMP, lhs, TMP);
            __ Srlv(dst, lhs, rhs_reg);
            __ Or(dst, dst, TMP);
          }
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          codegen_->MoveLocation(locations->Out(), locations->InAt(0), type);
        } else if (shift_value < kMipsBitsPerWord) {
          // Distance < 32: each output word combines bits of both input words.
          if (has_ins_rotr) {
            if (instr->IsShl()) {
              __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
              __ Sll(dst_low, lhs_low, shift_value);
            } else if (instr->IsShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Sra(dst_high, lhs_high, shift_value);
            } else if (instr->IsUShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
            } else {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
              __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
            }
          } else {
            // Pre-R2 fallback: carry the crossing bits through TMP.
            if (instr->IsShl()) {
              __ Sll(dst_low, lhs_low, shift_value);
              __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
              __ Sll(dst_high, lhs_high, shift_value);
              __ Or(dst_high, dst_high, TMP);
            } else if (instr->IsShr()) {
              __ Sra(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else if (instr->IsUShr()) {
              __ Srl(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else {
              __ Srl(TMP, lhs_low, shift_value);
              __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
              __ Or(dst_low, dst_low, TMP);
              __ Srl(TMP, lhs_high, shift_value);
              __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Or(dst_high, dst_high, TMP);
            }
          }
        } else {
          // Distance >= 32: words swap roles; shift the surviving word by
          // (shift_value - 32) and fill the other with zeros/sign bits.
          const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
          if (instr->IsShl()) {
            __ Sll(dst_high, lhs_low, shift_value_high);
            __ Move(dst_low, ZERO);
          } else if (instr->IsShr()) {
            __ Sra(dst_low, lhs_high, shift_value_high);
            __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
          } else if (instr->IsUShr()) {
            __ Srl(dst_low, lhs_high, shift_value_high);
            __ Move(dst_high, ZERO);
          } else {
            if (shift_value == kMipsBitsPerWord) {
              // 64-bit rotation by 32 is just a swap.
              __ Move(dst_low, lhs_high);
              __ Move(dst_high, lhs_low);
            } else {
              if (has_ins_rotr) {
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
              } else {
                __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Or(dst_low, dst_low, TMP);
                __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Or(dst_high, dst_high, TMP);
              }
            }
          }
        }
      } else {
        // Variable shift amount: compute the "< 32" result unconditionally,
        // then test bit 5 of the amount and swap/fill the words if it is set.
        MipsLabel done;
        if (instr->IsShl()) {
          __ Sllv(dst_low, lhs_low, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Srl(TMP, lhs_low, 1);
          __ Srlv(TMP, TMP, AT);
          __ Sllv(dst_high, lhs_high, rhs_reg);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, ZERO);
        } else if (instr->IsShr()) {
          __ Srav(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Sra(dst_high, dst_high, 31);
        } else if (instr->IsUShr()) {
          __ Srlv(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Move(dst_high, ZERO);
        } else {
          __ Nor(AT, ZERO, rhs_reg);
          __ Srlv(TMP, lhs_low, rhs_reg);
          __ Sll(dst_low, lhs_high, 1);
          __ Sllv(dst_low, dst_low, AT);
          __ Or(dst_low, dst_low, TMP);
          __ Srlv(TMP, lhs_high, rhs_reg);
          __ Sll(dst_high, lhs_low, 1);
          __ Sllv(dst_high, dst_high, AT);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(TMP, dst_high);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, TMP);
        }
        __ Bind(&done);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2514
// ADD shares location setup with the other binary operations.
void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2518
// ADD shares code emission with the other binary operations.
void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2522
// AND shares location setup with the other binary operations.
void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2526
// AND shares code emission with the other binary operations.
void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2530
2531void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002532 Primitive::Type type = instruction->GetType();
2533 bool object_array_get_with_read_barrier =
2534 kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002535 LocationSummary* locations =
Alexey Frunze15958152017-02-09 19:08:30 -08002536 new (GetGraph()->GetArena()) LocationSummary(instruction,
2537 object_array_get_with_read_barrier
2538 ? LocationSummary::kCallOnSlowPath
2539 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07002540 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2541 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
2542 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002543 locations->SetInAt(0, Location::RequiresRegister());
2544 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexey Frunze15958152017-02-09 19:08:30 -08002545 if (Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002546 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2547 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002548 // The output overlaps in the case of an object array get with
2549 // read barriers enabled: we do not want the move to overwrite the
2550 // array's location, as we need it to emit the read barrier.
2551 locations->SetOut(Location::RequiresRegister(),
2552 object_array_get_with_read_barrier
2553 ? Location::kOutputOverlap
2554 : Location::kNoOutputOverlap);
2555 }
2556 // We need a temporary register for the read barrier marking slow
2557 // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
2558 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002559 bool temp_needed = instruction->GetIndex()->IsConstant()
2560 ? !kBakerReadBarrierThunksEnableForFields
2561 : !kBakerReadBarrierThunksEnableForArrays;
2562 if (temp_needed) {
2563 locations->AddTemp(Location::RequiresRegister());
2564 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002565 }
2566}
2567
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002568static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2569 auto null_checker = [codegen, instruction]() {
2570 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002571 };
2572 return null_checker;
2573}
2574
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002575void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
2576 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002577 Location obj_loc = locations->InAt(0);
2578 Register obj = obj_loc.AsRegister<Register>();
2579 Location out_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002580 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002581 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002582 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002583
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002584 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002585 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2586 instruction->IsStringCharAt();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002587 switch (type) {
2588 case Primitive::kPrimBoolean: {
Alexey Frunze15958152017-02-09 19:08:30 -08002589 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002590 if (index.IsConstant()) {
2591 size_t offset =
2592 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002593 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002594 } else {
2595 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002596 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002597 }
2598 break;
2599 }
2600
2601 case Primitive::kPrimByte: {
Alexey Frunze15958152017-02-09 19:08:30 -08002602 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002603 if (index.IsConstant()) {
2604 size_t offset =
2605 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002606 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002607 } else {
2608 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002609 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002610 }
2611 break;
2612 }
2613
2614 case Primitive::kPrimShort: {
Alexey Frunze15958152017-02-09 19:08:30 -08002615 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002616 if (index.IsConstant()) {
2617 size_t offset =
2618 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002619 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002620 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002621 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002622 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002623 }
2624 break;
2625 }
2626
2627 case Primitive::kPrimChar: {
Alexey Frunze15958152017-02-09 19:08:30 -08002628 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002629 if (maybe_compressed_char_at) {
2630 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2631 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
2632 __ Sll(TMP, TMP, 31); // Extract compression flag into the most significant bit of TMP.
2633 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2634 "Expecting 0=compressed, 1=uncompressed");
2635 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002636 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002637 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2638 if (maybe_compressed_char_at) {
2639 MipsLabel uncompressed_load, done;
2640 __ Bnez(TMP, &uncompressed_load);
2641 __ LoadFromOffset(kLoadUnsignedByte,
2642 out,
2643 obj,
2644 data_offset + (const_index << TIMES_1));
2645 __ B(&done);
2646 __ Bind(&uncompressed_load);
2647 __ LoadFromOffset(kLoadUnsignedHalfword,
2648 out,
2649 obj,
2650 data_offset + (const_index << TIMES_2));
2651 __ Bind(&done);
2652 } else {
2653 __ LoadFromOffset(kLoadUnsignedHalfword,
2654 out,
2655 obj,
2656 data_offset + (const_index << TIMES_2),
2657 null_checker);
2658 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002659 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002660 Register index_reg = index.AsRegister<Register>();
2661 if (maybe_compressed_char_at) {
2662 MipsLabel uncompressed_load, done;
2663 __ Bnez(TMP, &uncompressed_load);
2664 __ Addu(TMP, obj, index_reg);
2665 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2666 __ B(&done);
2667 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002668 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002669 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2670 __ Bind(&done);
2671 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002672 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002673 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
2674 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002675 }
2676 break;
2677 }
2678
Alexey Frunze15958152017-02-09 19:08:30 -08002679 case Primitive::kPrimInt: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002680 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002681 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002682 if (index.IsConstant()) {
2683 size_t offset =
2684 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002685 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002686 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002687 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002688 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002689 }
2690 break;
2691 }
2692
Alexey Frunze15958152017-02-09 19:08:30 -08002693 case Primitive::kPrimNot: {
2694 static_assert(
2695 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2696 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2697 // /* HeapReference<Object> */ out =
2698 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2699 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002700 bool temp_needed = index.IsConstant()
2701 ? !kBakerReadBarrierThunksEnableForFields
2702 : !kBakerReadBarrierThunksEnableForArrays;
2703 Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze15958152017-02-09 19:08:30 -08002704 // Note that a potential implicit null check is handled in this
2705 // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002706 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
2707 if (index.IsConstant()) {
2708 // Array load with a constant index can be treated as a field load.
2709 size_t offset =
2710 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2711 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2712 out_loc,
2713 obj,
2714 offset,
2715 temp,
2716 /* needs_null_check */ false);
2717 } else {
2718 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2719 out_loc,
2720 obj,
2721 data_offset,
2722 index,
2723 temp,
2724 /* needs_null_check */ false);
2725 }
Alexey Frunze15958152017-02-09 19:08:30 -08002726 } else {
2727 Register out = out_loc.AsRegister<Register>();
2728 if (index.IsConstant()) {
2729 size_t offset =
2730 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2731 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
2732 // If read barriers are enabled, emit read barriers other than
2733 // Baker's using a slow path (and also unpoison the loaded
2734 // reference, if heap poisoning is enabled).
2735 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2736 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002737 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08002738 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
2739 // If read barriers are enabled, emit read barriers other than
2740 // Baker's using a slow path (and also unpoison the loaded
2741 // reference, if heap poisoning is enabled).
2742 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2743 out_loc,
2744 out_loc,
2745 obj_loc,
2746 data_offset,
2747 index);
2748 }
2749 }
2750 break;
2751 }
2752
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002753 case Primitive::kPrimLong: {
Alexey Frunze15958152017-02-09 19:08:30 -08002754 Register out = out_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002755 if (index.IsConstant()) {
2756 size_t offset =
2757 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002758 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002759 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002760 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002761 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002762 }
2763 break;
2764 }
2765
2766 case Primitive::kPrimFloat: {
Alexey Frunze15958152017-02-09 19:08:30 -08002767 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002768 if (index.IsConstant()) {
2769 size_t offset =
2770 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002771 __ LoadSFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002772 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002773 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002774 __ LoadSFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002775 }
2776 break;
2777 }
2778
2779 case Primitive::kPrimDouble: {
Alexey Frunze15958152017-02-09 19:08:30 -08002780 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002781 if (index.IsConstant()) {
2782 size_t offset =
2783 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002784 __ LoadDFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002785 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002786 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002787 __ LoadDFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002788 }
2789 break;
2790 }
2791
2792 case Primitive::kPrimVoid:
2793 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2794 UNREACHABLE();
2795 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002796}
2797
2798void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
2799 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2800 locations->SetInAt(0, Location::RequiresRegister());
2801 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2802}
2803
2804void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
2805 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002806 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002807 Register obj = locations->InAt(0).AsRegister<Register>();
2808 Register out = locations->Out().AsRegister<Register>();
2809 __ LoadFromOffset(kLoadWord, out, obj, offset);
2810 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002811 // Mask out compression flag from String's array length.
2812 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2813 __ Srl(out, out, 1u);
2814 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002815}
2816
Alexey Frunzef58b2482016-09-02 22:14:06 -07002817Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2818 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2819 ? Location::ConstantLocation(instruction->AsConstant())
2820 : Location::RequiresRegister();
2821}
2822
2823Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2824 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2825 // We can store a non-zero float or double constant without first loading it into the FPU,
2826 // but we should only prefer this if the constant has a single use.
2827 if (instruction->IsConstant() &&
2828 (instruction->AsConstant()->IsZeroBitPattern() ||
2829 instruction->GetUses().HasExactlyOneElement())) {
2830 return Location::ConstantLocation(instruction->AsConstant());
2831 // Otherwise fall through and require an FPU register for the constant.
2832 }
2833 return Location::RequiresFpuRegister();
2834}
2835
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002836void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002837 Primitive::Type value_type = instruction->GetComponentType();
2838
2839 bool needs_write_barrier =
2840 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2841 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2842
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002843 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2844 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002845 may_need_runtime_call_for_type_check ?
2846 LocationSummary::kCallOnSlowPath :
2847 LocationSummary::kNoCall);
2848
2849 locations->SetInAt(0, Location::RequiresRegister());
2850 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2851 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2852 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002853 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002854 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2855 }
2856 if (needs_write_barrier) {
2857 // Temporary register for the write barrier.
2858 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002859 }
2860}
2861
// Generates the MIPS32 code for an array element store.
// Computes the element address (constant index folded into the offset,
// variable index added/shifted into TMP), then emits the store for the
// component type. Reference stores additionally perform the type check
// (possibly via a slow path), heap-reference poisoning, and the GC card mark.
void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the store addresses `obj + folded offset` directly;
  // otherwise TMP holds `obj + index * element size`.
  Register base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        // Byte elements: no shift needed, plain add of the index.
        __ Addu(base_reg, obj, index.AsRegister<Register>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (value_location.IsConstant()) {
        // Just setting null.
        // (A constant reference can only be null here — see DCHECK_EQ below —
        // so no type check, write barrier, or poisoning is required.)
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Register value = value_location.AsRegister<Register>();
      Register temp1 = locations->GetTemp(0).AsRegister<Register>();
      Register temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      MipsLabel done;
      SlowPathCodeMIPS* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null is always allowed — skip the type check entirely.
          MipsLabel non_zero;
          __ Bnez(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          MipsLabel do_put;
          __ Beq(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnez(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bne(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      Register source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // With a slow path the (potential) implicit null check was already
        // recorded by the earlier class load; here the store itself is it.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        // 64-bit value lives in a register pair; the store helper takes the low half.
        Register value = value_location.AsRegisterPairLow<Register>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        // Constant FP value is stored as its raw bit pattern via a core register.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreSToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        // Constant FP value is stored as its raw bit pattern via core registers.
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreDToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
3099
3100void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003101 RegisterSet caller_saves = RegisterSet::Empty();
3102 InvokeRuntimeCallingConvention calling_convention;
3103 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3104 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3105 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003106 locations->SetInAt(0, Location::RequiresRegister());
3107 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003108}
3109
3110void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
3111 LocationSummary* locations = instruction->GetLocations();
3112 BoundsCheckSlowPathMIPS* slow_path =
3113 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
3114 codegen_->AddSlowPath(slow_path);
3115
3116 Register index = locations->InAt(0).AsRegister<Register>();
3117 Register length = locations->InAt(1).AsRegister<Register>();
3118
3119 // length is limited by the maximum positive signed 32-bit integer.
3120 // Unsigned comparison of length and index checks for index < 0
3121 // and for length <= index simultaneously.
3122 __ Bgeu(index, length, slow_path->GetEntryLabel());
3123}
3124
Alexey Frunze15958152017-02-09 19:08:30 -08003125// Temp is used for read barrier.
3126static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3127 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07003128 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08003129 (kUseBakerReadBarrier ||
3130 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3131 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3132 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3133 return 1;
3134 }
3135 return 0;
3136}
3137
3138// Extra temp is used for read barrier.
3139static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3140 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3141}
3142
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003143void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003144 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3145 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3146
3147 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3148 switch (type_check_kind) {
3149 case TypeCheckKind::kExactCheck:
3150 case TypeCheckKind::kAbstractClassCheck:
3151 case TypeCheckKind::kClassHierarchyCheck:
3152 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003153 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003154 ? LocationSummary::kCallOnSlowPath
3155 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3156 break;
3157 case TypeCheckKind::kArrayCheck:
3158 case TypeCheckKind::kUnresolvedCheck:
3159 case TypeCheckKind::kInterfaceCheck:
3160 call_kind = LocationSummary::kCallOnSlowPath;
3161 break;
3162 }
3163
3164 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003165 locations->SetInAt(0, Location::RequiresRegister());
3166 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003167 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003168}
3169
3170void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003171 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003172 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08003173 Location obj_loc = locations->InAt(0);
3174 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003175 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08003176 Location temp_loc = locations->GetTemp(0);
3177 Register temp = temp_loc.AsRegister<Register>();
3178 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3179 DCHECK_LE(num_temps, 2u);
3180 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003181 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3182 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3183 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3184 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3185 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3186 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3187 const uint32_t object_array_data_offset =
3188 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
3189 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003190
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003191 // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
3192 // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
3193 // read barriers is done for performance and code size reasons.
3194 bool is_type_check_slow_path_fatal = false;
3195 if (!kEmitCompilerReadBarrier) {
3196 is_type_check_slow_path_fatal =
3197 (type_check_kind == TypeCheckKind::kExactCheck ||
3198 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3199 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3200 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3201 !instruction->CanThrowIntoCatchBlock();
3202 }
3203 SlowPathCodeMIPS* slow_path =
3204 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
3205 is_type_check_slow_path_fatal);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003206 codegen_->AddSlowPath(slow_path);
3207
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003208 // Avoid this check if we know `obj` is not null.
3209 if (instruction->MustDoNullCheck()) {
3210 __ Beqz(obj, &done);
3211 }
3212
3213 switch (type_check_kind) {
3214 case TypeCheckKind::kExactCheck:
3215 case TypeCheckKind::kArrayCheck: {
3216 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003217 GenerateReferenceLoadTwoRegisters(instruction,
3218 temp_loc,
3219 obj_loc,
3220 class_offset,
3221 maybe_temp2_loc,
3222 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003223 // Jump to slow path for throwing the exception or doing a
3224 // more involved array check.
3225 __ Bne(temp, cls, slow_path->GetEntryLabel());
3226 break;
3227 }
3228
3229 case TypeCheckKind::kAbstractClassCheck: {
3230 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003231 GenerateReferenceLoadTwoRegisters(instruction,
3232 temp_loc,
3233 obj_loc,
3234 class_offset,
3235 maybe_temp2_loc,
3236 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003237 // If the class is abstract, we eagerly fetch the super class of the
3238 // object to avoid doing a comparison we know will fail.
3239 MipsLabel loop;
3240 __ Bind(&loop);
3241 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003242 GenerateReferenceLoadOneRegister(instruction,
3243 temp_loc,
3244 super_offset,
3245 maybe_temp2_loc,
3246 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003247 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3248 // exception.
3249 __ Beqz(temp, slow_path->GetEntryLabel());
3250 // Otherwise, compare the classes.
3251 __ Bne(temp, cls, &loop);
3252 break;
3253 }
3254
3255 case TypeCheckKind::kClassHierarchyCheck: {
3256 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003257 GenerateReferenceLoadTwoRegisters(instruction,
3258 temp_loc,
3259 obj_loc,
3260 class_offset,
3261 maybe_temp2_loc,
3262 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003263 // Walk over the class hierarchy to find a match.
3264 MipsLabel loop;
3265 __ Bind(&loop);
3266 __ Beq(temp, cls, &done);
3267 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003268 GenerateReferenceLoadOneRegister(instruction,
3269 temp_loc,
3270 super_offset,
3271 maybe_temp2_loc,
3272 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003273 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3274 // exception. Otherwise, jump to the beginning of the loop.
3275 __ Bnez(temp, &loop);
3276 __ B(slow_path->GetEntryLabel());
3277 break;
3278 }
3279
3280 case TypeCheckKind::kArrayObjectCheck: {
3281 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003282 GenerateReferenceLoadTwoRegisters(instruction,
3283 temp_loc,
3284 obj_loc,
3285 class_offset,
3286 maybe_temp2_loc,
3287 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003288 // Do an exact check.
3289 __ Beq(temp, cls, &done);
3290 // Otherwise, we need to check that the object's class is a non-primitive array.
3291 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003292 GenerateReferenceLoadOneRegister(instruction,
3293 temp_loc,
3294 component_offset,
3295 maybe_temp2_loc,
3296 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003297 // If the component type is null, jump to the slow path to throw the exception.
3298 __ Beqz(temp, slow_path->GetEntryLabel());
3299 // Otherwise, the object is indeed an array, further check that this component
3300 // type is not a primitive type.
3301 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3302 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3303 __ Bnez(temp, slow_path->GetEntryLabel());
3304 break;
3305 }
3306
3307 case TypeCheckKind::kUnresolvedCheck:
3308 // We always go into the type check slow path for the unresolved check case.
3309 // We cannot directly call the CheckCast runtime entry point
3310 // without resorting to a type checking slow path here (i.e. by
3311 // calling InvokeRuntime directly), as it would require to
3312 // assign fixed registers for the inputs of this HInstanceOf
3313 // instruction (following the runtime calling convention), which
3314 // might be cluttered by the potential first read barrier
3315 // emission at the beginning of this method.
3316 __ B(slow_path->GetEntryLabel());
3317 break;
3318
3319 case TypeCheckKind::kInterfaceCheck: {
3320 // Avoid read barriers to improve performance of the fast path. We can not get false
3321 // positives by doing this.
3322 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003323 GenerateReferenceLoadTwoRegisters(instruction,
3324 temp_loc,
3325 obj_loc,
3326 class_offset,
3327 maybe_temp2_loc,
3328 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003329 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003330 GenerateReferenceLoadTwoRegisters(instruction,
3331 temp_loc,
3332 temp_loc,
3333 iftable_offset,
3334 maybe_temp2_loc,
3335 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003336 // Iftable is never null.
3337 __ Lw(TMP, temp, array_length_offset);
3338 // Loop through the iftable and check if any class matches.
3339 MipsLabel loop;
3340 __ Bind(&loop);
3341 __ Addiu(temp, temp, 2 * kHeapReferenceSize); // Possibly in delay slot on R2.
3342 __ Beqz(TMP, slow_path->GetEntryLabel());
3343 __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
3344 __ MaybeUnpoisonHeapReference(AT);
3345 // Go to next interface.
3346 __ Addiu(TMP, TMP, -2);
3347 // Compare the classes and continue the loop if they do not match.
3348 __ Bne(AT, cls, &loop);
3349 break;
3350 }
3351 }
3352
3353 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003354 __ Bind(slow_path->GetExitLabel());
3355}
3356
3357void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3358 LocationSummary* locations =
3359 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3360 locations->SetInAt(0, Location::RequiresRegister());
3361 if (check->HasUses()) {
3362 locations->SetOut(Location::SameAsFirstInput());
3363 }
3364}
3365
3366void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
3367 // We assume the class is not null.
3368 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
3369 check->GetLoadClass(),
3370 check,
3371 check->GetDexPc(),
3372 true);
3373 codegen_->AddSlowPath(slow_path);
3374 GenerateClassInitializationCheck(slow_path,
3375 check->GetLocations()->InAt(0).AsRegister<Register>());
3376}
3377
3378void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
3379 Primitive::Type in_type = compare->InputAt(0)->GetType();
3380
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003381 LocationSummary* locations =
3382 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003383
3384 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003385 case Primitive::kPrimBoolean:
3386 case Primitive::kPrimByte:
3387 case Primitive::kPrimShort:
3388 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003389 case Primitive::kPrimInt:
Alexey Frunzee7697712016-09-15 21:37:49 -07003390 locations->SetInAt(0, Location::RequiresRegister());
3391 locations->SetInAt(1, Location::RequiresRegister());
3392 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3393 break;
3394
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003395 case Primitive::kPrimLong:
3396 locations->SetInAt(0, Location::RequiresRegister());
3397 locations->SetInAt(1, Location::RequiresRegister());
3398 // Output overlaps because it is written before doing the low comparison.
3399 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3400 break;
3401
3402 case Primitive::kPrimFloat:
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003403 case Primitive::kPrimDouble:
3404 locations->SetInAt(0, Location::RequiresFpuRegister());
3405 locations->SetInAt(1, Location::RequiresFpuRegister());
3406 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003407 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003408
3409 default:
3410 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3411 }
3412}
3413
// Materializes the three-way result of an HCompare into a core register:
// 0 if left == right, 1 if left > right, -1 if left < right.
void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register res = locations->Out().AsRegister<Register>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();
  // R6 FP compares write a result FPU register (CmpEqS/CmpLtS...); pre-R6 (R2)
  // uses FP condition-code flags (ColtS/CeqS + Bc1t/Movt), hence two code paths.
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();
      // res = (lhs > rhs) - (lhs < rhs).
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }
    case Primitive::kPrimLong: {
      MipsLabel done;
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
      // TODO: more efficient (direct) comparison with a constant.
      // Compare the high words first (signed compare)...
      __ Slt(TMP, lhs_high, rhs_high);
      __ Slt(AT, rhs_high, lhs_high);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bnez(res, &done);  // If we compared ==, check if lower bits are also equal.
      // ...and break the tie with an unsigned compare of the low words.
      __ Sltu(TMP, lhs_low, rhs_low);
      __ Sltu(AT, rhs_low, lhs_low);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimFloat: {
      // gt_bias selects the result for NaN operands: with gt_bias an unordered
      // compare produces 1, otherwise it produces -1 (see fallthrough values below).
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        __ CmpEqS(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);  // Not equal, not less: greater or NaN.
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);  // Not equal, not greater: less or NaN.
        }
      } else {
        // Pre-R6: FP condition flag 0 drives the branch/conditional move.
        if (gt_bias) {
          __ ColtS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);  // Zero the result when equal.
        } else {
          __ ColtS(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);  // Zero the result when equal.
        }
      }
      __ Bind(&done);
      break;
    }
    case Primitive::kPrimDouble: {
      // Same structure as the float case above, with double-precision opcodes.
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        __ CmpEqD(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        if (gt_bias) {
          __ ColtD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);
        } else {
          __ ColtD(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);
        }
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3539
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003540void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003541 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003542 switch (instruction->InputAt(0)->GetType()) {
3543 default:
3544 case Primitive::kPrimLong:
3545 locations->SetInAt(0, Location::RequiresRegister());
3546 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3547 break;
3548
3549 case Primitive::kPrimFloat:
3550 case Primitive::kPrimDouble:
3551 locations->SetInAt(0, Location::RequiresFpuRegister());
3552 locations->SetInAt(1, Location::RequiresFpuRegister());
3553 break;
3554 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003555 if (!instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003556 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3557 }
3558}
3559
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003560void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003561 if (instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003562 return;
3563 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003564
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003565 Primitive::Type type = instruction->InputAt(0)->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003566 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003567
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003568 switch (type) {
3569 default:
3570 // Integer case.
3571 GenerateIntCompare(instruction->GetCondition(), locations);
3572 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003573
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003574 case Primitive::kPrimLong:
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01003575 GenerateLongCompare(instruction->GetCondition(), locations);
3576 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003577
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003578 case Primitive::kPrimFloat:
3579 case Primitive::kPrimDouble:
Alexey Frunze2ddb7172016-09-06 17:04:55 -07003580 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3581 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003582 }
3583}
3584
Alexey Frunze7e99e052015-11-24 19:28:01 -08003585void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3586 DCHECK(instruction->IsDiv() || instruction->IsRem());
3587 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3588
3589 LocationSummary* locations = instruction->GetLocations();
3590 Location second = locations->InAt(1);
3591 DCHECK(second.IsConstant());
3592
3593 Register out = locations->Out().AsRegister<Register>();
3594 Register dividend = locations->InAt(0).AsRegister<Register>();
3595 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3596 DCHECK(imm == 1 || imm == -1);
3597
3598 if (instruction->IsRem()) {
3599 __ Move(out, ZERO);
3600 } else {
3601 if (imm == -1) {
3602 __ Subu(out, ZERO, dividend);
3603 } else if (out != dividend) {
3604 __ Move(out, dividend);
3605 }
3606 }
3607}
3608
// Emits integer division/remainder by a power-of-two constant divisor without
// using the hardware divider, rounding the quotient toward zero as Java requires.
void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  // AbsOrMin leaves INT32_MIN unchanged; the unsigned cast then yields 2^31.
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (ctz_imm == 1) {
      // Fast path for division by +/-2, which is very common.
      __ Srl(TMP, dividend, 31);
    } else {
      // TMP = (dividend < 0) ? abs_imm - 1 : 0 — the bias that makes the
      // arithmetic shift below round toward zero for negative dividends.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
    }
    __ Addu(out, dividend, TMP);
    __ Sra(out, out, ctz_imm);
    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ Subu(out, ZERO, out);
    }
  } else {
    if (ctz_imm == 1) {
      // Fast path for modulo +/-2, which is very common.
      __ Sra(TMP, dividend, 31);
      __ Subu(out, dividend, TMP);
      __ Andi(out, out, 1);
      __ Addu(out, out, TMP);
    } else {
      // out = ((dividend + bias) & (abs_imm - 1)) - bias,
      // where bias = (dividend < 0) ? abs_imm - 1 : 0 (computed into TMP).
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
      __ Addu(out, dividend, TMP);
      if (IsUint<16>(abs_imm - 1)) {
        __ Andi(out, out, abs_imm - 1);
      } else {
        // Mask via a shift pair when the mask does not fit Andi's 16-bit immediate.
        __ Sll(out, out, 32 - ctz_imm);
        __ Srl(out, out, 32 - ctz_imm);
      }
      __ Subu(out, out, TMP);
    }
  }
}
3657
// Emits integer division/remainder by an arbitrary non-trivial constant using
// the multiply-by-magic-number technique (cf. Hacker's Delight, ch. 10).
void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // TMP = high 32 bits of (dividend * magic).
  __ LoadConst32(TMP, magic);
  if (isR6) {
    __ MuhR6(TMP, dividend, TMP);
  } else {
    __ MultR2(dividend, TMP);
    __ Mfhi(TMP);
  }
  // Correct for a sign mismatch between the divisor and the magic constant.
  if (imm > 0 && magic < 0) {
    __ Addu(TMP, TMP, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Subu(TMP, TMP, dividend);
  }

  if (shift != 0) {
    __ Sra(TMP, TMP, shift);
  }

  if (instruction->IsDiv()) {
    // quotient = TMP - (TMP >> 31): adds 1 when TMP is negative (round toward zero).
    __ Sra(out, TMP, 31);
    __ Subu(out, TMP, out);
  } else {
    // AT = quotient (same correction as above), then remainder = dividend - AT * imm.
    __ Sra(AT, TMP, 31);
    __ Subu(AT, TMP, AT);
    __ LoadConst32(TMP, imm);
    if (isR6) {
      __ MulR6(TMP, AT, TMP);
    } else {
      __ MulR2(TMP, AT, TMP);
    }
    __ Subu(out, dividend, TMP);
  }
}
3708
3709void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3710 DCHECK(instruction->IsDiv() || instruction->IsRem());
3711 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3712
3713 LocationSummary* locations = instruction->GetLocations();
3714 Register out = locations->Out().AsRegister<Register>();
3715 Location second = locations->InAt(1);
3716
3717 if (second.IsConstant()) {
3718 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3719 if (imm == 0) {
3720 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3721 } else if (imm == 1 || imm == -1) {
3722 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003723 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003724 DivRemByPowerOfTwo(instruction);
3725 } else {
3726 DCHECK(imm <= -2 || imm >= 2);
3727 GenerateDivRemWithAnyConstant(instruction);
3728 }
3729 } else {
3730 Register dividend = locations->InAt(0).AsRegister<Register>();
3731 Register divisor = second.AsRegister<Register>();
3732 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3733 if (instruction->IsDiv()) {
3734 if (isR6) {
3735 __ DivR6(out, dividend, divisor);
3736 } else {
3737 __ DivR2(out, dividend, divisor);
3738 }
3739 } else {
3740 if (isR6) {
3741 __ ModR6(out, dividend, divisor);
3742 } else {
3743 __ ModR2(out, dividend, divisor);
3744 }
3745 }
3746 }
3747}
3748
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003749void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
3750 Primitive::Type type = div->GetResultType();
3751 LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003752 ? LocationSummary::kCallOnMainOnly
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003753 : LocationSummary::kNoCall;
3754
3755 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3756
3757 switch (type) {
3758 case Primitive::kPrimInt:
3759 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08003760 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003761 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3762 break;
3763
3764 case Primitive::kPrimLong: {
3765 InvokeRuntimeCallingConvention calling_convention;
3766 locations->SetInAt(0, Location::RegisterPairLocation(
3767 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3768 locations->SetInAt(1, Location::RegisterPairLocation(
3769 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3770 locations->SetOut(calling_convention.GetReturnLocation(type));
3771 break;
3772 }
3773
3774 case Primitive::kPrimFloat:
3775 case Primitive::kPrimDouble:
3776 locations->SetInAt(0, Location::RequiresFpuRegister());
3777 locations->SetInAt(1, Location::RequiresFpuRegister());
3778 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3779 break;
3780
3781 default:
3782 LOG(FATAL) << "Unexpected div type " << type;
3783 }
3784}
3785
3786void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
3787 Primitive::Type type = instruction->GetType();
3788 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003789
3790 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003791 case Primitive::kPrimInt:
3792 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003793 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003794 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01003795 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003796 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
3797 break;
3798 }
3799 case Primitive::kPrimFloat:
3800 case Primitive::kPrimDouble: {
3801 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
3802 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3803 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3804 if (type == Primitive::kPrimFloat) {
3805 __ DivS(dst, lhs, rhs);
3806 } else {
3807 __ DivD(dst, lhs, rhs);
3808 }
3809 break;
3810 }
3811 default:
3812 LOG(FATAL) << "Unexpected div type " << type;
3813 }
3814}
3815
3816void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003817 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003818 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003819}
3820
3821void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3822 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
3823 codegen_->AddSlowPath(slow_path);
3824 Location value = instruction->GetLocations()->InAt(0);
3825 Primitive::Type type = instruction->GetType();
3826
3827 switch (type) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003828 case Primitive::kPrimBoolean:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003829 case Primitive::kPrimByte:
3830 case Primitive::kPrimChar:
3831 case Primitive::kPrimShort:
3832 case Primitive::kPrimInt: {
3833 if (value.IsConstant()) {
3834 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3835 __ B(slow_path->GetEntryLabel());
3836 } else {
3837 // A division by a non-null constant is valid. We don't need to perform
3838 // any check, so simply fall through.
3839 }
3840 } else {
3841 DCHECK(value.IsRegister()) << value;
3842 __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
3843 }
3844 break;
3845 }
3846 case Primitive::kPrimLong: {
3847 if (value.IsConstant()) {
3848 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3849 __ B(slow_path->GetEntryLabel());
3850 } else {
3851 // A division by a non-null constant is valid. We don't need to perform
3852 // any check, so simply fall through.
3853 }
3854 } else {
3855 DCHECK(value.IsRegisterPair()) << value;
3856 __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
3857 __ Beqz(TMP, slow_path->GetEntryLabel());
3858 }
3859 break;
3860 }
3861 default:
3862 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
3863 }
3864}
3865
3866void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
3867 LocationSummary* locations =
3868 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3869 locations->SetOut(Location::ConstantLocation(constant));
3870}
3871
3872void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
3873 // Will be generated at use site.
3874}
3875
3876void LocationsBuilderMIPS::VisitExit(HExit* exit) {
3877 exit->SetLocations(nullptr);
3878}
3879
3880void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
3881}
3882
3883void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
3884 LocationSummary* locations =
3885 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3886 locations->SetOut(Location::ConstantLocation(constant));
3887}
3888
3889void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
3890 // Will be generated at use site.
3891}
3892
3893void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
3894 got->SetLocations(nullptr);
3895}
3896
3897void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
3898 DCHECK(!successor->IsExitBlock());
3899 HBasicBlock* block = got->GetBlock();
3900 HInstruction* previous = got->GetPrevious();
3901 HLoopInformation* info = block->GetLoopInformation();
3902
3903 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3904 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3905 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3906 return;
3907 }
3908 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3909 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3910 }
3911 if (!codegen_->GoesToNextBlock(block, successor)) {
3912 __ B(codegen_->GetLabelOf(successor));
3913 }
3914}
3915
3916void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
3917 HandleGoto(got, got->GetSuccessor());
3918}
3919
3920void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3921 try_boundary->SetLocations(nullptr);
3922}
3923
3924void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3925 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3926 if (!successor->IsExitBlock()) {
3927 HandleGoto(try_boundary, successor);
3928 }
3929}
3930
// Materializes the 32-bit integral condition `lhs cond rhs` into `dst`
// (1 if true, 0 if false), folding small constants into immediate-form
// instructions where possible. MIPS only provides slt/slti (signed) and
// sltu/sltiu (unsigned), so GE/GT/AE/A variants are synthesized by swapping
// operands and/or inverting the one-bit result with Xori.
void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
                                                      LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          // dst = lhs - rhs, then test the difference against zero.
          __ Addiu(dst, lhs, -rhs_imm);
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        // dst = lhs ^ rhs (zero iff equal), then test against zero.
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst32(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
4076
// Materializes a 32-bit integer condition as a zero/non-zero value in `dst`,
// emitting whichever of the condition or its logical negation is cheaper on
// MIPS (which only provides slt/sltu/slti/sltiu, no "set on greater/equal").
//
// Returns true when `dst` holds the NEGATED condition, i.e. the condition is
// true iff dst == 0; returns false when the condition is true iff dst != 0.
// The caller must branch/select on `dst` accordingly.
// Clobbers TMP when the constant RHS does not fit an immediate encoding.
bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
                                                         LocationSummary* input_locations,
                                                         Register dst) {
  Register lhs = input_locations->InAt(0).AsRegister<Register>();
  Location rhs_location = input_locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        // dst = lhs - rhs, which is zero iff lhs == rhs.
        __ Addiu(dst, lhs, -rhs_imm);
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        // dst = lhs ^ rhs, which is zero iff lhs == rhs (xori zero-extends
        // its 16-bit immediate).
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      // dst == 0 iff lhs == rhs, so EQ is materialized negated.
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      // dst = (lhs < rhs); GE is its negation.
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        // dst = (rhs < lhs), i.e. lhs > rhs; LE is its negation.
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      // dst = (lhs <u rhs); AE is its negation.
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        // dst = (rhs <u lhs), i.e. lhs >u rhs; BE is its negation.
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
4174
// Emits a compare-and-branch to `label` for a 32-bit integer condition.
// Three strategies, cheapest first:
//  1. RHS is constant zero: single compare-with-zero branch (beqz/bltz/...),
//     with kCondB degenerating to "never branch" and kCondAE to an
//     unconditional branch.
//  2. R6, or RHS in a register: load the constant into TMP if needed and use
//     the two-register pseudo-branches (Blt/Bge/Bltu/...).
//  3. R2 with a constant RHS: use slti/sltiu tricks for 16-bit-encodable
//     constants (including the "rhs + 1" trick for LE/GT/BE/A), falling back
//     to materializing the constant in TMP.
// Clobbers TMP when the RHS is a non-zero constant.
void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
                                                               LocationSummary* locations,
                                                               MipsLabel* label) {
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqz(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnez(lhs, label);
        break;
      case kCondLT:
        __ Bltz(lhs, label);
        break;
      case kCondGE:
        __ Bgez(lhs, label);
        break;
      case kCondLE:
        __ Blez(lhs, label);
        break;
      case kCondGT:
        __ Bgtz(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else {
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (isR6 || !use_imm) {
      if (use_imm) {
        rhs_reg = TMP;
        __ LoadConst32(rhs_reg, rhs_imm);
      }
      switch (cond) {
        case kCondEQ:
          __ Beq(lhs, rhs_reg, label);
          break;
        case kCondNE:
          __ Bne(lhs, rhs_reg, label);
          break;
        case kCondLT:
          __ Blt(lhs, rhs_reg, label);
          break;
        case kCondGE:
          __ Bge(lhs, rhs_reg, label);
          break;
        case kCondLE:
          // lhs <= rhs is rhs >= lhs.
          __ Bge(rhs_reg, lhs, label);
          break;
        case kCondGT:
          // lhs > rhs is rhs < lhs.
          __ Blt(rhs_reg, lhs, label);
          break;
        case kCondB:
          __ Bltu(lhs, rhs_reg, label);
          break;
        case kCondAE:
          __ Bgeu(lhs, rhs_reg, label);
          break;
        case kCondBE:
          __ Bgeu(rhs_reg, lhs, label);
          break;
        case kCondA:
          __ Bltu(rhs_reg, lhs, label);
          break;
      }
    } else {
      // Special cases for more efficient comparison with constants on R2.
      switch (cond) {
        case kCondEQ:
          __ LoadConst32(TMP, rhs_imm);
          __ Beq(lhs, TMP, label);
          break;
        case kCondNE:
          __ LoadConst32(TMP, rhs_imm);
          __ Bne(lhs, TMP, label);
          break;
        case kCondLT:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(lhs, TMP, label);
          }
          break;
        case kCondGE:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(lhs, TMP, label);
          }
          break;
        case kCondLE:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(TMP, lhs, label);
          }
          break;
        case kCondGT:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(TMP, lhs, label);
          }
          break;
        case kCondB:
          if (IsInt<16>(rhs_imm)) {
            // Sltiu sign-extends its immediate; see MaterializeIntCompare
            // for the resulting comparable value ranges.
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(lhs, TMP, label);
          }
          break;
        case kCondAE:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(lhs, TMP, label);
          }
          break;
        case kCondBE:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(TMP, lhs, label);
          }
          break;
        case kCondA:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(TMP, lhs, label);
          }
          break;
      }
    }
  }
}
4351
// Materializes a 64-bit integer condition as 0/1 in `dst`. The 64-bit
// operands live in register pairs (high/low words); a constant RHS is split
// into imm_high/imm_low.
//
// General scheme for the ordering conditions: the result is
//   (lhs_high <cmp> rhs_high) || (high words equal && lhs_low <u rhs_low)
// computed branch-free via slt/sltu and combined with Or; GE/LE/AE/BE are
// produced by inverting LT/GT/B/A with Xori.
// Clobbers TMP and AT. The constant paths special-case `dst` aliasing
// `lhs_low`: the low-word unsigned compare must then be emitted first, while
// lhs_low still holds its original value.
void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
                                                       LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }
  if (use_imm && imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        // dst = ((lhs_high | lhs_low) == 0).
        __ Or(dst, lhs_high, lhs_low);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(dst, lhs_high, lhs_low);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
        // Sign of the 64-bit value is the sign of the high word.
        __ Slt(dst, lhs_high, ZERO);
        break;
      case kCondGE:
        __ Slt(dst, lhs_high, ZERO);
        __ Xori(dst, dst, 1);
        break;
      case kCondLE:
        // lhs <= 0 iff !(sign-extension of high word <u (high | low)).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        __ Xori(dst, dst, 1);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        break;
      case kCondB:  // always false
        __ Andi(dst, dst, 0);
        break;
      case kCondAE:  // always true
        __ Ori(dst, ZERO, 1);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        if (dst == lhs_low) {
          // dst aliases lhs_low: do the low-word compare first, before
          // dst (== lhs_low) is overwritten below.
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, lhs_high, TMP);   // AT = (lhs_high < rhs_high).
        __ Slt(TMP, TMP, lhs_high);  // TMP = (rhs_high < lhs_high).
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        // dst = high words equal && (lhs_low <u rhs_low), then OR in the
        // strict high-word result.
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, TMP, lhs_high);   // AT = (rhs_high < lhs_high).
        __ Slt(TMP, lhs_high, TMP);  // TMP = (lhs_high < rhs_high).
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, lhs_high, TMP);   // Unsigned compare of the high words.
        __ Sltu(TMP, TMP, lhs_high);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, TMP, lhs_high);
        __ Sltu(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  } else {
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);  // TMP = !(rhs_high < lhs_high) && (lhs_low <u rhs_low).
        __ Slt(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  }
}
4566
// Emits a compare-and-branch to `label` for a 64-bit integer condition on
// register pairs. Three strategies:
//  1. RHS is constant zero: compare against the sign/zero pattern of the
//     high/low words directly (kCondB never branches, kCondAE always does).
//  2. Other constant RHS: load the constant words into TMP/AT as needed.
//  3. RHS in registers: use slt/sltu to fold the low-word compare.
// The ordering cases follow a common two-branch shape: branch if the high
// words already decide the condition, otherwise compute "high words equal &&
// low-word unsigned compare" and branch on that.
// Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
                                                                LocationSummary* locations,
                                                                MipsLabel* label) {
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }

  if (use_imm && imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        // Sign of the 64-bit value is the sign of the high word.
        __ Bltz(lhs_high, label);
        break;
      case kCondGE:
        __ Bgez(lhs_high, label);
        break;
      case kCondLE:
        // lhs <= 0 iff !(sign-extension of high word <u (high | low)).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bgeu(AT, TMP, label);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bltu(AT, TMP, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);   // High words decide: lhs < rhs.
        __ Slt(TMP, TMP, lhs_high);     // TMP = (rhs_high < lhs_high).
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);       // AT = (lhs_low <u rhs_low).
        __ Blt(TMP, AT, label);         // Taken iff high words equal && AT.
        break;
      case kCondGE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);   // High words decide: lhs > rhs.
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);            // Taken iff !(lhs < rhs).
        break;
      case kCondLE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);       // AT = (rhs_low <u lhs_low).
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);            // Taken iff !(lhs > rhs).
        break;
      case kCondGT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);  // Same shape, unsigned high words.
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  } else {
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  }
}
4779
// Materializes a floating-point condition as 0/1 in the GPR `dst`.
//
// `gt_bias` selects how unordered operands (NaN) are treated, by choosing
// between the ordered (CmpLt/ColtS: unordered -> false) and unordered
// (CmpUlt/CultS: unordered -> true) compare instructions; this implements
// the HCompare gt/lt bias for NaN operands.
//
// R6: the FP compares write an all-ones/all-zero mask to FTMP; Mfc1 + Andi
// extracts bit 0. For kCondNE the trick `Addiu(dst, dst, 1)` maps the
// mask 0xffffffff (equal) to 0 and 0 (not equal) to 1, since there is no
// "compare not equal" instruction.
// R2: the compares set condition-code flag 0; dst is preloaded with 1 and
// conditionally cleared with Movf/Movt on that flag.
// Only the six ordering/equality conditions are valid for FP; others are
// fatal. Clobbers FTMP on R6 and CC flag 0 on R2.
void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
                                                     bool gt_bias,
                                                     Primitive::Type type,
                                                     LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          // Map the all-ones "equal" mask to 0 and the zero "not equal"
          // result to 1.
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          // lhs > rhs is emitted as rhs < lhs with swapped operands;
          // the bias choice is therefore inverted as well.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);  // Clear dst if flag 0 is false.
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);  // Clear dst if flag 0 is true.
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          // Same all-ones-to-zero trick as the float NE case above.
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  }
}
5003
// Emits a pre-R6 (R2) floating-point compare into condition-code flag `cc`.
// `gt_bias` selects between the ordered (Colt/Cole: NaN -> false) and
// unordered (Cult/Cule: NaN -> true) compare variants; GT/GE swap the
// operands and invert the bias choice accordingly.
//
// Returns true when the flag must be read INVERTED: the condition holds iff
// the flag is false. This happens only for kCondNE, since there is no
// "compare not equal" instruction and c.eq is emitted instead. Returns
// false when the condition holds iff the flag is true.
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          int cc) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CeqS(cc, lhs, rhs);
        return false;
      case kCondNE:
        // No "not equal" compare; emit c.eq and report the inversion.
        __ CeqS(cc, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ ColtS(cc, lhs, rhs);
        } else {
          __ CultS(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeS(cc, lhs, rhs);
        } else {
          __ CuleS(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs is rhs < lhs; swapped operands invert the bias choice.
        if (gt_bias) {
          __ CultS(cc, rhs, lhs);
        } else {
          __ ColtS(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleS(cc, rhs, lhs);
        } else {
          __ ColeS(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CeqD(cc, lhs, rhs);
        return false;
      case kCondNE:
        __ CeqD(cc, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ ColtD(cc, lhs, rhs);
        } else {
          __ CultD(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeD(cc, lhs, rhs);
        } else {
          __ CuleD(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CultD(cc, rhs, lhs);
        } else {
          __ ColtD(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleD(cc, rhs, lhs);
        } else {
          __ ColeD(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
5095
// Emits an R6 floating-point compare whose all-ones/all-zero result mask is
// written to the FPU register `dst`. `gt_bias` selects between the ordered
// (CmpLt/CmpLe: NaN -> false) and unordered (CmpUlt/CmpUle: NaN -> true)
// compare variants; GT/GE swap the operands and invert the bias choice.
//
// Returns true when the result must be read INVERTED: the condition holds
// iff `dst` is zero. This happens only for kCondNE, since there is no
// "compare not equal" instruction and cmp.eq is emitted instead. Returns
// false when the condition holds iff `dst` is non-zero (all ones).
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          FRegister dst) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No "not equal" compare; emit cmp.eq and report the inversion.
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs is rhs < lhs; swapped operands invert the bias choice.
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
5187
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005188void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
5189 bool gt_bias,
5190 Primitive::Type type,
5191 LocationSummary* locations,
5192 MipsLabel* label) {
5193 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
5194 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
5195 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
5196 if (type == Primitive::kPrimFloat) {
5197 if (isR6) {
5198 switch (cond) {
5199 case kCondEQ:
5200 __ CmpEqS(FTMP, lhs, rhs);
5201 __ Bc1nez(FTMP, label);
5202 break;
5203 case kCondNE:
5204 __ CmpEqS(FTMP, lhs, rhs);
5205 __ Bc1eqz(FTMP, label);
5206 break;
5207 case kCondLT:
5208 if (gt_bias) {
5209 __ CmpLtS(FTMP, lhs, rhs);
5210 } else {
5211 __ CmpUltS(FTMP, lhs, rhs);
5212 }
5213 __ Bc1nez(FTMP, label);
5214 break;
5215 case kCondLE:
5216 if (gt_bias) {
5217 __ CmpLeS(FTMP, lhs, rhs);
5218 } else {
5219 __ CmpUleS(FTMP, lhs, rhs);
5220 }
5221 __ Bc1nez(FTMP, label);
5222 break;
5223 case kCondGT:
5224 if (gt_bias) {
5225 __ CmpUltS(FTMP, rhs, lhs);
5226 } else {
5227 __ CmpLtS(FTMP, rhs, lhs);
5228 }
5229 __ Bc1nez(FTMP, label);
5230 break;
5231 case kCondGE:
5232 if (gt_bias) {
5233 __ CmpUleS(FTMP, rhs, lhs);
5234 } else {
5235 __ CmpLeS(FTMP, rhs, lhs);
5236 }
5237 __ Bc1nez(FTMP, label);
5238 break;
5239 default:
5240 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005241 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005242 }
5243 } else {
5244 switch (cond) {
5245 case kCondEQ:
5246 __ CeqS(0, lhs, rhs);
5247 __ Bc1t(0, label);
5248 break;
5249 case kCondNE:
5250 __ CeqS(0, lhs, rhs);
5251 __ Bc1f(0, label);
5252 break;
5253 case kCondLT:
5254 if (gt_bias) {
5255 __ ColtS(0, lhs, rhs);
5256 } else {
5257 __ CultS(0, lhs, rhs);
5258 }
5259 __ Bc1t(0, label);
5260 break;
5261 case kCondLE:
5262 if (gt_bias) {
5263 __ ColeS(0, lhs, rhs);
5264 } else {
5265 __ CuleS(0, lhs, rhs);
5266 }
5267 __ Bc1t(0, label);
5268 break;
5269 case kCondGT:
5270 if (gt_bias) {
5271 __ CultS(0, rhs, lhs);
5272 } else {
5273 __ ColtS(0, rhs, lhs);
5274 }
5275 __ Bc1t(0, label);
5276 break;
5277 case kCondGE:
5278 if (gt_bias) {
5279 __ CuleS(0, rhs, lhs);
5280 } else {
5281 __ ColeS(0, rhs, lhs);
5282 }
5283 __ Bc1t(0, label);
5284 break;
5285 default:
5286 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005287 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005288 }
5289 }
5290 } else {
5291 DCHECK_EQ(type, Primitive::kPrimDouble);
5292 if (isR6) {
5293 switch (cond) {
5294 case kCondEQ:
5295 __ CmpEqD(FTMP, lhs, rhs);
5296 __ Bc1nez(FTMP, label);
5297 break;
5298 case kCondNE:
5299 __ CmpEqD(FTMP, lhs, rhs);
5300 __ Bc1eqz(FTMP, label);
5301 break;
5302 case kCondLT:
5303 if (gt_bias) {
5304 __ CmpLtD(FTMP, lhs, rhs);
5305 } else {
5306 __ CmpUltD(FTMP, lhs, rhs);
5307 }
5308 __ Bc1nez(FTMP, label);
5309 break;
5310 case kCondLE:
5311 if (gt_bias) {
5312 __ CmpLeD(FTMP, lhs, rhs);
5313 } else {
5314 __ CmpUleD(FTMP, lhs, rhs);
5315 }
5316 __ Bc1nez(FTMP, label);
5317 break;
5318 case kCondGT:
5319 if (gt_bias) {
5320 __ CmpUltD(FTMP, rhs, lhs);
5321 } else {
5322 __ CmpLtD(FTMP, rhs, lhs);
5323 }
5324 __ Bc1nez(FTMP, label);
5325 break;
5326 case kCondGE:
5327 if (gt_bias) {
5328 __ CmpUleD(FTMP, rhs, lhs);
5329 } else {
5330 __ CmpLeD(FTMP, rhs, lhs);
5331 }
5332 __ Bc1nez(FTMP, label);
5333 break;
5334 default:
5335 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005336 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005337 }
5338 } else {
5339 switch (cond) {
5340 case kCondEQ:
5341 __ CeqD(0, lhs, rhs);
5342 __ Bc1t(0, label);
5343 break;
5344 case kCondNE:
5345 __ CeqD(0, lhs, rhs);
5346 __ Bc1f(0, label);
5347 break;
5348 case kCondLT:
5349 if (gt_bias) {
5350 __ ColtD(0, lhs, rhs);
5351 } else {
5352 __ CultD(0, lhs, rhs);
5353 }
5354 __ Bc1t(0, label);
5355 break;
5356 case kCondLE:
5357 if (gt_bias) {
5358 __ ColeD(0, lhs, rhs);
5359 } else {
5360 __ CuleD(0, lhs, rhs);
5361 }
5362 __ Bc1t(0, label);
5363 break;
5364 case kCondGT:
5365 if (gt_bias) {
5366 __ CultD(0, rhs, lhs);
5367 } else {
5368 __ ColtD(0, rhs, lhs);
5369 }
5370 __ Bc1t(0, label);
5371 break;
5372 case kCondGE:
5373 if (gt_bias) {
5374 __ CuleD(0, rhs, lhs);
5375 } else {
5376 __ ColeD(0, rhs, lhs);
5377 }
5378 __ Bc1t(0, label);
5379 break;
5380 default:
5381 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005382 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005383 }
5384 }
5385 }
5386}
5387
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005388void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00005389 size_t condition_input_index,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005390 MipsLabel* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00005391 MipsLabel* false_target) {
5392 HInstruction* cond = instruction->InputAt(condition_input_index);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005393
David Brazdil0debae72015-11-12 18:37:00 +00005394 if (true_target == nullptr && false_target == nullptr) {
5395 // Nothing to do. The code always falls through.
5396 return;
5397 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00005398 // Constant condition, statically compared against "true" (integer value 1).
5399 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00005400 if (true_target != nullptr) {
5401 __ B(true_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005402 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005403 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00005404 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00005405 if (false_target != nullptr) {
5406 __ B(false_target);
5407 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005408 }
David Brazdil0debae72015-11-12 18:37:00 +00005409 return;
5410 }
5411
5412 // The following code generates these patterns:
5413 // (1) true_target == nullptr && false_target != nullptr
5414 // - opposite condition true => branch to false_target
5415 // (2) true_target != nullptr && false_target == nullptr
5416 // - condition true => branch to true_target
5417 // (3) true_target != nullptr && false_target != nullptr
5418 // - condition true => branch to true_target
5419 // - branch to false_target
5420 if (IsBooleanValueOrMaterializedCondition(cond)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005421 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00005422 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005423 DCHECK(cond_val.IsRegister());
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005424 if (true_target == nullptr) {
David Brazdil0debae72015-11-12 18:37:00 +00005425 __ Beqz(cond_val.AsRegister<Register>(), false_target);
5426 } else {
5427 __ Bnez(cond_val.AsRegister<Register>(), true_target);
5428 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005429 } else {
5430 // The condition instruction has not been materialized, use its inputs as
5431 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00005432 HCondition* condition = cond->AsCondition();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005433 Primitive::Type type = condition->InputAt(0)->GetType();
5434 LocationSummary* locations = cond->GetLocations();
5435 IfCondition if_cond = condition->GetCondition();
5436 MipsLabel* branch_target = true_target;
David Brazdil0debae72015-11-12 18:37:00 +00005437
David Brazdil0debae72015-11-12 18:37:00 +00005438 if (true_target == nullptr) {
5439 if_cond = condition->GetOppositeCondition();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005440 branch_target = false_target;
David Brazdil0debae72015-11-12 18:37:00 +00005441 }
5442
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005443 switch (type) {
5444 default:
5445 GenerateIntCompareAndBranch(if_cond, locations, branch_target);
5446 break;
5447 case Primitive::kPrimLong:
5448 GenerateLongCompareAndBranch(if_cond, locations, branch_target);
5449 break;
5450 case Primitive::kPrimFloat:
5451 case Primitive::kPrimDouble:
5452 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
5453 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005454 }
5455 }
David Brazdil0debae72015-11-12 18:37:00 +00005456
5457 // If neither branch falls through (case 3), the conditional branch to `true_target`
5458 // was already emitted (case 2) and we need to emit a jump to `false_target`.
5459 if (true_target != nullptr && false_target != nullptr) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005460 __ B(false_target);
5461 }
5462}
5463
5464void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5465 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00005466 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005467 locations->SetInAt(0, Location::RequiresRegister());
5468 }
5469}
5470
5471void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00005472 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5473 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5474 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5475 nullptr : codegen_->GetLabelOf(true_successor);
5476 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5477 nullptr : codegen_->GetLabelOf(false_successor);
5478 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005479}
5480
// Register allocation for HDeoptimize. The instruction branches to a slow path
// that calls into the runtime, so only the first runtime-calling-convention
// argument register is recorded as caller-saved for that path.
void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  // As with HIf, only a materialized condition needs an input register.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
5492
5493void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005494 SlowPathCodeMIPS* slow_path =
5495 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005496 GenerateTestAndBranch(deoptimize,
5497 /* condition_input_index */ 0,
5498 slow_path->GetEntryLabel(),
5499 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005500}
5501
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
//
// The per-case comments below spell out the instruction sequence that
// GenConditionalMoveR2/R6 would emit for that (condition type, destination type,
// zero-constant input) combination; a case is rejected when the sequence would
// be too long to be worthwhile.
static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  // A true/false value that is the constant zero can be synthesized from the
  // ZERO register (R2) or by the zeroing behavior of seleqz/selnez (R6),
  // which enables shorter sequences below.
  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  if (!cond->IsConstant()) {
    switch (cond_type) {
      default:
        switch (dst_type) {
          default:
            // Moving int on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg, false_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg, true_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                // selnez AT, true_reg, cond_reg
                // seleqz TMP, false_reg, cond_reg
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movn out_reg, true_reg/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg_lo, false_reg_lo, cond_reg
                // seleqz out_reg_hi, false_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg_lo, true_reg_lo, cond_reg
                // selnez out_reg_hi, true_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
              // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on int condition.
            if (is_r6) {
              if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                can_move_conditionally = true;
                if (is_true_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // seleqz.fmt out_reg, false_reg, temp_cond_reg
                  use_const_for_true_in = true;
                } else if (is_false_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // selnez.fmt out_reg, true_reg, temp_cond_reg
                  use_const_for_false_in = true;
                } else {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // sel.fmt temp_cond_reg, false_reg, true_reg
                  // mov.fmt out_reg, temp_cond_reg
                }
              }
            } else {
              // movn.fmt out_reg, true_reg, cond_reg
              can_move_conditionally = true;
            }
            break;
        }
        break;
      case Primitive::kPrimLong:
        // We don't materialize long comparison now
        // and use conditional branches instead.
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        switch (dst_type) {
          default:
            // Moving int on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg, false_reg, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg, true_reg, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else {
                // mfc1 TMP, temp_cond_reg
                // selnez AT, true_reg, TMP
                // seleqz TMP, false_reg, TMP
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movt out_reg, true_reg/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg_lo, false_reg_lo, TMP
                // seleqz out_reg_hi, false_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg_lo, true_reg_lo, TMP
                // selnez out_reg_hi, true_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movt out_reg_lo, true_reg_lo/ZERO, cc
              // movt out_reg_hi, true_reg_hi/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on float/double condition.
            if (is_r6) {
              can_move_conditionally = true;
              if (is_true_value_zero_constant) {
                // seleqz.fmt out_reg, false_reg, temp_cond_reg
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez.fmt out_reg, true_reg, temp_cond_reg
                use_const_for_false_in = true;
              } else {
                // sel.fmt temp_cond_reg, false_reg, true_reg
                // mov.fmt out_reg, temp_cond_reg
              }
            } else {
              // movt.fmt out_reg, true_reg, cc
              can_move_conditionally = true;
            }
            break;
        }
        break;
    }
  }

  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    // Zero-constant inputs stay as constants (no register); everything else
    // needs a core or FPU register depending on the destination type.
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }
    // On R6 we don't require the output to be the same as the
    // first input for conditional moves unlike on R2.
    bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
    if (is_out_same_as_first_in) {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    } else {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    }
  }

  return can_move_conditionally;
}
5738
// Emits an HSelect as a conditional move on MIPS32R2, using MOVZ/MOVN (integer
// condition register) or MOVT/MOVF (FP condition code). On R2 the output is the
// same register as the false value (input 0), so only the true value is
// conditionally moved in. Only combinations accepted by CanMoveConditionally()
// reach this function.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location src = locations->InAt(1);
  Register src_reg = ZERO;
  Register src_reg_high = ZERO;
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  int cond_cc = 0;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already a 0/1 value in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the condition into cond_reg (int) or cond_cc (FP).
    // `cond_inverted` reports whether the materialized value is the
    // negation of the condition, which flips MOVN<->MOVZ / MOVT<->MOVF below.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR2(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               cond_cc);
        break;
    }
  }

  DCHECK(dst.Equals(locations->InAt(0)));
  if (src.IsRegister()) {
    src_reg = src.AsRegister<Register>();
  } else if (src.IsRegisterPair()) {
    src_reg = src.AsRegisterPairLow<Register>();
    src_reg_high = src.AsRegisterPairHigh<Register>();
  } else if (src.IsConstant()) {
    // A zero constant true value is moved from the ZERO register
    // (src_reg/src_reg_high keep their ZERO initialization).
    DCHECK(src.GetConstant()->IsZeroBitPattern());
  }

  switch (cond_type) {
    default:
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
          } else {
            __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          } else {
            __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
      }
      break;
    case Primitive::kPrimLong:
      // Long conditions are never materialized (see CanMoveConditionally()).
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
          } else {
            __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          } else {
            __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
      }
      break;
  }
}
5860
// Emits an HSelect as a conditional move on MIPS32R6, using SELEQZ/SELNEZ for
// core registers and SELEQZ.fmt/SELNEZ.fmt/SEL.fmt for FPU registers. Unlike R2,
// the output register need not alias an input. A constant input is always the
// zero bit pattern and is synthesized by the zeroing behavior of seleqz/selnez.
// Only combinations accepted by CanMoveConditionally() reach this function.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  FRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already a 0/1 value in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the condition into cond_reg (int) or fcond_reg (FP).
    // `cond_inverted` reports whether the materialized value is the negation
    // of the condition, which swaps Seleqz<->Selnez (and operand order) below.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR6(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               fcond_reg);
        break;
    }
  }

  // Constant inputs must be the zero bit pattern (guaranteed by
  // CanMoveConditionally()); they get no register and are produced
  // implicitly by seleqz/selnez zeroing.
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      if (Primitive::IsFloatingPointType(cond_type)) {
        // Move the FP comparison result into a core register for seleqz/selnez.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        }
      } else {
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
        }
        __ Or(dst.AsRegister<Register>(), AT, TMP);
      }
      break;
    case Primitive::kPrimLong: {
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      Register dst_lo = dst.AsRegisterPairLow<Register>();
      Register dst_hi = dst.AsRegisterPairHigh<Register>();
      if (true_src.IsConstant()) {
        Register src_lo = false_src.AsRegisterPairLow<Register>();
        Register src_hi = false_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        } else {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        }
      } else {
        // CanMoveConditionally() only accepts long moves when one input
        // is the zero constant.
        DCHECK(false_src.IsConstant());
        Register src_lo = true_src.AsRegisterPairLow<Register>();
        Register src_hi = true_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        } else {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
6034
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006035void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6036 LocationSummary* locations = new (GetGraph()->GetArena())
6037 LocationSummary(flag, LocationSummary::kNoCall);
6038 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07006039}
6040
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006041void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6042 __ LoadFromOffset(kLoadWord,
6043 flag->GetLocations()->Out().AsRegister<Register>(),
6044 SP,
6045 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07006046}
6047
David Brazdil74eb1b22015-12-14 11:44:01 +00006048void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
6049 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006050 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00006051}
6052
6053void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006054 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
6055 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
6056 if (is_r6) {
6057 GenConditionalMoveR6(select);
6058 } else {
6059 GenConditionalMoveR2(select);
6060 }
6061 } else {
6062 LocationSummary* locations = select->GetLocations();
6063 MipsLabel false_target;
6064 GenerateTestAndBranch(select,
6065 /* condition_input_index */ 2,
6066 /* true_target */ nullptr,
6067 &false_target);
6068 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
6069 __ Bind(&false_target);
6070 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006071}
6072
void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs, outputs or temps: the instruction only marks a position
  // for native debug info (see the code generator visitor below).
  new (GetGraph()->GetArena()) LocationSummary(info);
}
6076
void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally empty: MaybeRecordNativeDebugInfo is already called
  // implicitly in CodeGenerator::Compile.
}
6080
// Emits a single MIPS NOP instruction.
void CodeGeneratorMIPS::GenerateNop() {
  __ Nop();
}
6084
// Register allocation for instance/static field gets. Two special situations
// drive the choices below:
//  - a volatile 64-bit (long/double) load cannot be done atomically inline on
//    MIPS32, so it becomes a call on the main path using the runtime calling
//    convention;
//  - an object (reference) field get with read barriers enabled may take a
//    slow path and must not have its output overwrite the object register.
void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      generate_volatile
          ? LocationSummary::kCallOnMainOnly
          : (object_field_get_with_read_barrier
              ? LocationSummary::kCallOnSlowPath
              : LocationSummary::kNoCall));

  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // In0: the object (or class for static fields) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetOut(Location::Any());
      // Need some temp core regs since FP results are returned in core registers
      Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
    }
  } else {
    if (Primitive::IsFloatingPointType(instruction->GetType())) {
      locations->SetOut(Location::RequiresFpuRegister());
    } else {
      // The output overlaps in the case of an object field get with
      // read barriers enabled: we do not want the move to overwrite the
      // object's location, as we need it to emit the read barrier.
      locations->SetOut(Location::RequiresRegister(),
                        object_field_get_with_read_barrier
                            ? Location::kOutputOverlap
                            : Location::kNoOutputOverlap);
    }
    if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
      // We need a temporary register for the read barrier marking slow
      // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
      // With Baker read barrier thunks the thunk does the work and no
      // temp is needed.
      if (!kBakerReadBarrierThunksEnableForFields) {
        locations->AddTemp(Location::RequiresRegister());
      }
    }
  }
}
6138
// Emits code for an instance/static field load.
//
// Three major cases are handled:
//  - 64-bit volatile loads (long/double) go through the kQuickA64Load runtime
//    entrypoint, since MIPS32 has no atomic 64-bit load; the address is
//    materialized in the A0 temp and the result comes back in core registers.
//  - Reference loads emit the configured read barrier (Baker fast path or a
//    slow-path barrier) and, if volatile, a LoadAny barrier afterwards.
//  - All other loads are a single load instruction with an implicit null check.
void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness for the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile && load_type == kLoadDoubleword) {
    // 64-bit volatile load: call the kQuickA64Load runtime entrypoint with
    // the field address in A0 (temp 0).
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
    if (type == Primitive::kPrimDouble) {
      // FP results are returned in core registers. Need to move them.
      if (dst_loc.IsFpuRegister()) {
        __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
        __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                         dst_loc.AsFpuRegister<FRegister>());
      } else {
        // The locations builder allowed Location::Any(), so the result may
        // have been given a stack slot instead of an FP register.
        DCHECK(dst_loc.IsDoubleStackSlot());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(1).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(2).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex() + 4);
      }
    }
  } else {
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With thunks enabled no temp register is reserved for the barrier.
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register dst;
      if (type == Primitive::kPrimLong) {
        DCHECK(dst_loc.IsRegisterPair());
        dst = dst_loc.AsRegisterPairLow<Register>();
      } else {
        DCHECK(dst_loc.IsRegister());
        dst = dst_loc.AsRegister<Register>();
      }
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    } else {
      DCHECK(dst_loc.IsFpuRegister());
      FRegister dst = dst_loc.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ LoadSFromOffset(dst, obj, offset, null_checker);
      } else {
        __ LoadDFromOffset(dst, obj, offset, null_checker);
      }
    }
  }

  // Memory barriers for volatile reference loads are emitted in the
  // reference branch above; here we cover the remaining volatile types.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
6259
6260void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
6261 Primitive::Type field_type = field_info.GetFieldType();
6262 bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
6263 bool generate_volatile = field_info.IsVolatile() && is_wide;
6264 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006265 instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006266
6267 locations->SetInAt(0, Location::RequiresRegister());
6268 if (generate_volatile) {
6269 InvokeRuntimeCallingConvention calling_convention;
6270 // need A0 to hold base + offset
6271 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6272 if (field_type == Primitive::kPrimLong) {
6273 locations->SetInAt(1, Location::RegisterPairLocation(
6274 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
6275 } else {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006276 // Use Location::Any() to prevent situations when running out of available fp registers.
6277 locations->SetInAt(1, Location::Any());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006278 // Pass FP parameters in core registers.
6279 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
6280 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
6281 }
6282 } else {
6283 if (Primitive::IsFloatingPointType(field_type)) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006284 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006285 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006286 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006287 }
6288 }
6289}
6290
// Emits code for an instance/static field store.
//
// Volatile stores are bracketed by AnyStore/AnyAny barriers. 64-bit volatile
// stores call the kQuickA64Store runtime entrypoint (MIPS32 has no atomic
// 64-bit store); all other stores are a single store instruction, with heap
// reference poisoning applied when storing a reference. A GC card is marked
// after any store that needs a write barrier.
void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc,
                                                  bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width for the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (is_volatile && store_type == kStoreDoubleword) {
    // 64-bit volatile store: call kQuickA64Store with the field address in
    // A0 (temp 0) and the value in core registers (temps 1/2 for double).
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check.
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    if (type == Primitive::kPrimDouble) {
      // Pass FP parameters in core registers.
      if (value_location.IsFpuRegister()) {
        __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
                value_location.AsFpuRegister<FRegister>());
        __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                           value_location.AsFpuRegister<FRegister>());
      } else if (value_location.IsDoubleStackSlot()) {
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(1).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex());
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(2).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex() + 4);
      } else {
        // A constant double: materialize its bits directly into the pair.
        DCHECK(value_location.IsConstant());
        DCHECK(value_location.GetConstant()->IsDoubleConstant());
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
                       locations->GetTemp(1).AsRegister<Register>(),
                       value);
      }
    }
    codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
  } else {
    if (value_location.IsConstant()) {
      int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
      __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register src;
      if (type == Primitive::kPrimLong) {
        src = value_location.AsRegisterPairLow<Register>();
      } else {
        src = value_location.AsRegister<Register>();
      }
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      FRegister src = value_location.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ StoreSToOffset(src, obj, offset, null_checker);
      } else {
        __ StoreDToOffset(src, obj, offset, null_checker);
      }
    }
  }

  if (needs_write_barrier) {
    // Only reference stores need a write barrier, so the value is in a
    // single core register here.
    Register src = value_location.AsRegister<Register>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
6405
// Register allocation for an instance field load; delegates to the common
// field-get handler.
void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6409
// Code generation for an instance field load; delegates to the common
// field-get handler.
void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
6413
// Register allocation for an instance field store; delegates to the common
// field-set handler.
void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
6417
// Code generation for an instance field store; delegates to the common
// field-set handler.
void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
6424
// Loads the reference at `out + offset` into `out` (the same register is
// both base and destination), applying the requested read barrier strategy.
// `maybe_temp` must hold a register unless Baker thunks are enabled for
// fields; for the slow-path barrier it preserves the original `out` value.
void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Register out_reg = out.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    // Only the thunk-based Baker barrier can do without a temp register.
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<Register>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6463
// Loads the reference at `obj + offset` into `out` (distinct base and
// destination registers), applying the requested read barrier strategy.
// Unlike the one-register variant, no value needs saving before the load,
// so `maybe_temp` is only required by the non-thunk Baker barrier.
void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Register out_reg = out.AsRegister<Register>();
  Register obj_reg = obj.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Only the thunk-based Baker barrier can do without a temp register.
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6500
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006501static inline int GetBakerMarkThunkNumber(Register reg) {
6502 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 21, "Expecting equal");
6503 if (reg >= V0 && reg <= T7) { // 14 consequtive regs.
6504 return reg - V0;
6505 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
6506 return 14 + (reg - S2);
6507 } else if (reg == FP) { // One more.
6508 return 20;
6509 }
6510 LOG(FATAL) << "Unexpected register " << reg;
6511 UNREACHABLE();
6512}
6513
6514static inline int GetBakerMarkFieldArrayThunkDisplacement(Register reg, bool short_offset) {
6515 int num = GetBakerMarkThunkNumber(reg) +
6516 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
6517 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
6518}
6519
6520static inline int GetBakerMarkGcRootThunkDisplacement(Register reg) {
6521 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
6522 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
6523}
6524
// Loads a GC root at `obj + offset` into `root`, applying the requested read
// barrier strategy (Baker via thunk, Baker via slow path, non-Baker slow
// path, or no barrier). When `label_low` is non-null it is bound exactly on
// the load instruction (with assembler reordering suspended) so a PC-relative
// patch can target it; in that case `offset` must be the 0x5678 placeholder.
void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                           Location root,
                                                           Register obj,
                                                           uint32_t offset,
                                                           ReadBarrierOption read_barrier_option,
                                                           MipsLabel* label_low) {
  // Saved assembler reorder state; only assigned on paths that disable
  // reordering, and only read on those same paths.
  bool reordering;
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  Register root_reg = root.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        // // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //    temp = &gc_root_thunk<root_reg>
        //    root = temp(root)
        // }

        bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lw.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        Register base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
        // Reordering must stay off so the load lands exactly in the
        // branch's delay slot (and under `label_low`, if bound).
        reordering = __ SetReorder(false);
        if (!short_offset) {
          DCHECK(!label_low);
          __ AddUpper(base, obj, offset_high);
        }
        MipsLabel skip_call;
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadWord, root_reg, base, offset_low);  // Single instruction
                                                                   // in delay slot.
        if (isR6) {
          __ Jialc(T9, thunk_disp);
        } else {
          __ Addiu(T9, T9, thunk_disp);
          __ Jalr(T9);
          __ Nop();
        }
        __ Bind(&skip_call);
        __ SetReorder(reordering);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        // if (temp != null) {
        //   root = temp(root)
        // }

        if (label_low != nullptr) {
          reordering = __ SetReorder(false);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
        if (label_low != nullptr) {
          __ SetReorder(reordering);
        }
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS* slow_path =
            new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
        __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        reordering = __ SetReorder(false);
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Addiu32(root_reg, obj, offset);
      if (label_low != nullptr) {
        __ SetReorder(reordering);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      reordering = __ SetReorder(false);
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
    if (label_low != nullptr) {
      __ SetReorder(reordering);
    }
  }
}
6671
// Emits a reference field load at `obj + offset` into `ref` with a Baker
// read barrier. With thunks enabled, the thunk call/skip sequence is emitted
// inline (delay-slot placement differs between R2 and R6 and between short
// and long offsets); otherwise this delegates to the generic
// GenerateReferenceLoadWithBakerReadBarrier using the supplied `temp`.
void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t offset,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    // // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // If the offset is too large to fit into the lw instruction, we
    //   // use an adjusted base register (TMP) here. This register
    //   // receives bits 16 ... 31 of the offset before the thunk invocation
    //   // and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    // gray_return_address:

    DCHECK(temp.IsInvalid());
    bool isR6 = GetInstructionSetFeatures().IsR6();
    int16_t offset_low = Low16Bits(offset);
    int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lw.
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    bool reordering = __ SetReorder(false);
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    Register base = short_offset ? obj : TMP;
    MipsLabel skip_call;
    if (short_offset) {
      if (isR6) {
        __ Beqzc(T9, &skip_call, /* is_bare */ true);
        __ Nop();  // In forbidden slot.
        __ Jialc(T9, thunk_disp);
      } else {
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Nop();  // In delay slot.
      }
      __ Bind(&skip_call);
    } else {
      // Long offset: bits 16..31 of the offset are folded into `base` (TMP),
      // interleaved with the branch/call so delay slots are put to use.
      if (isR6) {
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Aui(base, obj, offset_high);  // In delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        __ Lui(base, offset_high);
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Bind(&skip_call);
        __ Addu(base, base, obj);  // In delay slot.
      }
    }
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, base, offset_low);  // Single instruction.
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
6779
// Emits an array-element reference load guarded by a Baker read barrier.
// With thunks enabled, the entrypoint loaded into T9 doubles as the
// "is GC marking" flag: when T9 == 0 the thunk call is skipped entirely
// and only the plain reference load executes.
void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t data_offset,
                                                              Location index,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // References are 4 bytes, hence the fixed TIMES_4 element scale.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    // Reordering is disabled so the assembler does not disturb the exact
    // branch/delay-slot layout the thunk return address depends on.
    bool reordering = __ SetReorder(false);
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    // "Long offset" intrinsics pass the index as a register pair; only the
    // low half carries data.
    Register index_reg = index.IsRegisterPair()
        ? index.AsRegisterPairLow<Register>()
        : index.AsRegister<Register>();
    MipsLabel skip_call;
    // R6 uses a compact branch plus JIALC; pre-R6 computes the element
    // address in the branch/jump delay slots around JALR.
    if (GetInstructionSetFeatures().IsR6()) {
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Lsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
    } else {
      __ Sll(TMP, index_reg, scale_factor);
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Addiu(T9, T9, thunk_disp);  // In delay slot.
      __ Jalr(T9);
      __ Bind(&skip_call);
      __ Addu(TMP, TMP, obj);  // In delay slot.
    }
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // Thunks disabled: fall back to the generic slow-path-based barrier.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
6868
// Generic Baker read barrier reference load (fields, arrays, and the
// UnsafeGet*/UnsafeCAS intrinsics). Loads obj->monitor_ *first*, then the
// reference, and branches to a mark slow path when the read barrier state
// bit in the lock word is gray. When `always_update_field` is true, the
// slow path also writes the marked reference back to `obj + index` (then
// `offset` must be 0 and `scale_factor` TIMES_1).
void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                  Location ref,
                                                                  Register obj,
                                                                  uint32_t offset,
                                                                  Location index,
                                                                  ScaleFactor scale_factor,
                                                                  Location temp,
                                                                  bool needs_null_check,
                                                                  bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Register ref_reg = ref.AsRegister<Register>();
  // `temp_reg` holds the lock word; it is clobbered below when shifting the
  // read barrier state bit into the sign position.
  Register temp_reg = temp.AsRegister<Register>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load above is the faulting instruction for the implicit
    // null check, so it must be recorded here, before the reference load.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
    } else {
      // Handle the special case of the
      // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
      // intrinsics, which use a register pair as index ("long
      // offset"), of which only the low part contains data.
      Register index_reg = index.IsRegisterPair()
          ? index.AsRegisterPairLow<Register>()
          : index.AsRegister<Register>();
      __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
      __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register pair (of which only the lower half
    // is used). Thus `offset` and `scale_factor` above are expected
    // to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
                                                  ref,
                                                  obj,
                                                  /* field_offset */ index,
                                                  temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltz(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6977
6978void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
6979 Location out,
6980 Location ref,
6981 Location obj,
6982 uint32_t offset,
6983 Location index) {
6984 DCHECK(kEmitCompilerReadBarrier);
6985
6986 // Insert a slow path based read barrier *after* the reference load.
6987 //
6988 // If heap poisoning is enabled, the unpoisoning of the loaded
6989 // reference will be carried out by the runtime within the slow
6990 // path.
6991 //
6992 // Note that `ref` currently does not get unpoisoned (when heap
6993 // poisoning is enabled), which is alright as the `ref` argument is
6994 // not used by the artReadBarrierSlow entry point.
6995 //
6996 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6997 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
6998 ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
6999 AddSlowPath(slow_path);
7000
7001 __ B(slow_path->GetEntryLabel());
7002 __ Bind(slow_path->GetExitLabel());
7003}
7004
7005void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7006 Location out,
7007 Location ref,
7008 Location obj,
7009 uint32_t offset,
7010 Location index) {
7011 if (kEmitCompilerReadBarrier) {
7012 // Baker's read barriers shall be handled by the fast path
7013 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
7014 DCHECK(!kUseBakerReadBarrier);
7015 // If heap poisoning is enabled, unpoisoning will be taken care of
7016 // by the runtime within the slow path.
7017 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
7018 } else if (kPoisonHeapReferences) {
7019 __ UnpoisonHeapReference(out.AsRegister<Register>());
7020 }
7021}
7022
7023void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7024 Location out,
7025 Location root) {
7026 DCHECK(kEmitCompilerReadBarrier);
7027
7028 // Insert a slow path based read barrier *after* the GC root load.
7029 //
7030 // Note that GC roots are not affected by heap poisoning, so we do
7031 // not need to do anything special for this here.
7032 SlowPathCodeMIPS* slow_path =
7033 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
7034 AddSlowPath(slow_path);
7035
7036 __ B(slow_path->GetEntryLabel());
7037 __ Bind(slow_path->GetExitLabel());
7038}
7039
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007040void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007041 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
7042 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07007043 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007044 switch (type_check_kind) {
7045 case TypeCheckKind::kExactCheck:
7046 case TypeCheckKind::kAbstractClassCheck:
7047 case TypeCheckKind::kClassHierarchyCheck:
7048 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08007049 call_kind =
7050 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007051 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007052 break;
7053 case TypeCheckKind::kArrayCheck:
7054 case TypeCheckKind::kUnresolvedCheck:
7055 case TypeCheckKind::kInterfaceCheck:
7056 call_kind = LocationSummary::kCallOnSlowPath;
7057 break;
7058 }
7059
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007060 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007061 if (baker_read_barrier_slow_path) {
7062 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
7063 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007064 locations->SetInAt(0, Location::RequiresRegister());
7065 locations->SetInAt(1, Location::RequiresRegister());
7066 // The output does overlap inputs.
7067 // Note that TypeCheckSlowPathMIPS uses this register too.
7068 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08007069 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007070}
7071
// Generates code for HInstanceOf: sets `out` to 1 when `obj` is an instance
// of `cls`, 0 otherwise. A fast-path sequence is emitted per TypeCheckKind;
// unresolved and interface checks go entirely through TypeCheckSlowPathMIPS.
void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  MipsLabel done;
  SlowPathCodeMIPS* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) == 0, i.e. 1 iff the class pointers match.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      // `out` serves as the cursor while walking up the superclass chain.
      MipsLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      __ Bne(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      MipsLabel loop, success;
      __ Bind(&loop);
      __ Beq(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnez(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      MipsLabel success;
      __ Beq(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      // (A null component type means `obj` is not an array at all.)
      __ Beqz(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = 1 iff the component's primitive type field is kPrimNot (0).
      __ Sltiu(out, out, 1);
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                     /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bne(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                     /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
7245
7246void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
7247 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7248 locations->SetOut(Location::ConstantLocation(constant));
7249}
7250
void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant is materialized at each use site.
}
7254
7255void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
7256 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7257 locations->SetOut(Location::ConstantLocation(constant));
7258}
7259
void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant is materialized at each use site.
}
7263
7264void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
7265 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
7266 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
7267}
7268
7269void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
7270 HandleInvoke(invoke);
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007271 // The register T7 is required to be used for the hidden argument in
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007272 // art_quick_imt_conflict_trampoline, so add the hidden argument.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007273 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007274}
7275
// Emits an interface call through the IMT: loads the receiver's class,
// fetches the IMT entry for the invoke's IMT index, and jumps via T9.
// The interface method's dex index is passed as a "hidden argument" in the
// second temp register.
void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  // The receiver may live on the stack or in a register.
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above is the faulting instruction for the implicit null
  // check on the receiver; record the PC now.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  __ LoadFromOffset(kLoadWord, temp, temp,
      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMipsPointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
7317
7318void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07007319 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7320 if (intrinsic.TryDispatch(invoke)) {
7321 return;
7322 }
7323
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007324 HandleInvoke(invoke);
7325}
7326
7327void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007328 // Explicit clinit checks triggered by static invokes must have been pruned by
7329 // art::PrepareForRegisterAllocation.
7330 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007331
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007332 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
Vladimir Marko65979462017-05-19 17:25:12 +01007333 bool has_extra_input = invoke->HasPcRelativeMethodLoadKind() && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007334
Chris Larsen701566a2015-10-27 15:29:13 -07007335 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7336 if (intrinsic.TryDispatch(invoke)) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007337 if (invoke->GetLocations()->CanCall() && has_extra_input) {
7338 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
7339 }
Chris Larsen701566a2015-10-27 15:29:13 -07007340 return;
7341 }
7342
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007343 HandleInvoke(invoke);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007344
7345 // Add the extra input register if either the dex cache array base register
7346 // or the PC-relative base register for accessing literals is needed.
7347 if (has_extra_input) {
7348 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
7349 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007350}
7351
void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Polymorphic invokes use the common invoke location summary; no
  // intrinsic dispatch is attempted.
  HandleInvoke(invoke);
}
7355
void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate fully to the code generator's shared invoke-polymorphic path.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
7359
Chris Larsen701566a2015-10-27 15:29:13 -07007360static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007361 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07007362 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
7363 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007364 return true;
7365 }
7366 return false;
7367}
7368
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007369HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
Alexey Frunze06a46c42016-07-19 15:00:40 -07007370 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007371 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007372 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007373 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
Vladimir Markoaad75c62016-10-03 08:46:48 +00007374 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007375 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007376 bool is_r6 = GetInstructionSetFeatures().IsR6();
7377 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007378 switch (desired_string_load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007379 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007380 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007381 DCHECK(!Runtime::Current()->UseJitCompilation());
Alexey Frunze06a46c42016-07-19 15:00:40 -07007382 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007383 case HLoadString::LoadKind::kBootImageAddress:
7384 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007385 case HLoadString::LoadKind::kJitTableAddress:
7386 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007387 fallback_load = false;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007388 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007389 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007390 fallback_load = false;
7391 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007392 }
7393 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007394 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007395 }
7396 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007397}
7398
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007399HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
7400 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007401 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007402 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007403 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7404 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007405 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007406 bool is_r6 = GetInstructionSetFeatures().IsR6();
7407 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007408 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007409 case HLoadClass::LoadKind::kInvalid:
7410 LOG(FATAL) << "UNREACHABLE";
7411 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007412 case HLoadClass::LoadKind::kReferrersClass:
7413 fallback_load = false;
7414 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007415 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007416 case HLoadClass::LoadKind::kBssEntry:
7417 DCHECK(!Runtime::Current()->UseJitCompilation());
7418 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007419 case HLoadClass::LoadKind::kBootImageAddress:
7420 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007421 case HLoadClass::LoadKind::kJitTableAddress:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007422 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007423 fallback_load = false;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007424 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007425 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007426 fallback_load = false;
7427 break;
7428 }
7429 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007430 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007431 }
7432 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007433}
7434
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007435Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
7436 Register temp) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007437 CHECK(!GetInstructionSetFeatures().IsR6());
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007438 CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
7439 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
7440 if (!invoke->GetLocations()->Intrinsified()) {
7441 return location.AsRegister<Register>();
7442 }
7443 // For intrinsics we allow any location, so it may be on the stack.
7444 if (!location.IsRegister()) {
7445 __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
7446 return temp;
7447 }
7448 // For register locations, check if the register was saved. If so, get it from the stack.
7449 // Note: There is a chance that the register was saved but not overwritten, so we could
7450 // save one load. However, since this is just an intrinsic slow path we prefer this
7451 // simple and more robust approach rather that trying to determine if that's the case.
7452 SlowPathCode* slow_path = GetCurrentSlowPath();
7453 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
7454 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
7455 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
7456 __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
7457 return temp;
7458 }
7459 return location.AsRegister<Register>();
7460}
7461
Vladimir Markodc151b22015-10-15 18:02:30 +01007462HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
7463 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01007464 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007465 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007466 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007467 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007468 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7469 // with irreducible loops.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007470 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007471 bool is_r6 = GetInstructionSetFeatures().IsR6();
7472 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007473 switch (dispatch_info.method_load_kind) {
Vladimir Marko65979462017-05-19 17:25:12 +01007474 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007475 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007476 break;
Vladimir Markodc151b22015-10-15 18:02:30 +01007477 default:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007478 fallback_load = false;
Vladimir Markodc151b22015-10-15 18:02:30 +01007479 break;
7480 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007481 if (fallback_load) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007482 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007483 dispatch_info.method_load_data = 0;
7484 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007485 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01007486}
7487
// Emits the method load and the call for a static or direct invoke.
// `temp` receives the callee ArtMethod* for most load kinds; `slow_path` (may be null)
// is associated with the recorded PC info for stack map purposes.
void CodeGeneratorMIPS::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  // Pre-R6 PC-relative loads need an explicit base register (the extra invoke parameter);
  // R6 can address PC-relatively without one, so ZERO is used as a dummy.
  Register base_reg = (invoke->HasPcRelativeMethodLoadKind() && !is_r6)
      ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
      : ZERO;

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadWord,
                        temp.AsRegister<Register>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling ourselves: the current method is already in a register.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Two linked patches: one for the high 16 bits, one for the low 16 bits of the
      // PC-relative method address; resolved by the linker.
      PcRelativePatchInfo* info_high = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      // Reordering is disabled so the patched instruction pair stays adjacent.
      bool reordering = __ SetReorder(false);
      Register temp_reg = temp.AsRegister<Register>();
      EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
      __ Addiu(temp_reg, TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the method's .bss entry (filled in at runtime).
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      Register temp_reg = temp.AsRegister<Register>();
      bool reordering = __ SetReorder(false);
      EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
      __ Lw(temp_reg, TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch-and-link straight to our own frame entry.
      __ Bal(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadWord,
                        T9,
                        callee_method.AsRegister<Register>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMipsPointerSize).Int32Value());
      // T9()
      // Note: T9 is the conventional MIPS register for indirect calls (PIC ABI).
      __ Jalr(T9);
      __ NopIfNoReordering();
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
7566
7567void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007568 // Explicit clinit checks triggered by static invokes must have been pruned by
7569 // art::PrepareForRegisterAllocation.
7570 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007571
7572 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7573 return;
7574 }
7575
7576 LocationSummary* locations = invoke->GetLocations();
7577 codegen_->GenerateStaticOrDirectCall(invoke,
7578 locations->HasTemps()
7579 ? locations->GetTemp(0)
7580 : Location::NoLocation());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007581}
7582
// Emits a virtual dispatch: class load, vtable lookup, and indirect call through T9.
// `temp_location` holds a scratch core register; `slow_path` (may be null) is
// associated with the recorded PC info.
void CodeGeneratorMIPS::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  Register temp = temp_location.AsRegister<Register>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  // The implicit null check must be recorded immediately after the class load,
  // which is the instruction that can fault on a null receiver.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
7618
7619void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7620 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7621 return;
7622 }
7623
7624 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007625 DCHECK(!codegen_->IsLeafMethod());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007626}
7627
// Sets up register locations for HLoadClass depending on its load kind and
// read-barrier configuration.
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Full runtime call: class goes in/out through the first runtime calling
    // convention register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      if (isR6) {
        break;  // R6 addresses PC-relatively without a base register input.
      }
      FALLTHROUGH_INTENDED;
    case HLoadClass::LoadKind::kReferrersClass:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
7676
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      // R6 needs no base; R2 takes it from the extra input set up by the builder.
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    case HLoadClass::LoadKind::kReferrersClass:
    case HLoadClass::LoadKind::kRuntimeCall:
      base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  // Boot-image classes are immovable, so no read barrier is needed for them.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              base_or_current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Linked high/low patches for the PC-relative type address, resolved by the linker.
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      // Keep the patched instruction pair adjacent.
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     out,
                                                     base_or_current_method_reg,
                                                     info_low);
      __ Addiu(out, out, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the class from its .bss entry; a null result means it is not yet
      // resolved and the slow path must be taken.
      bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      // With non-Baker read barriers there is no temp; reuse `out` as the address register.
      Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high,
                                                     temp,
                                                     base_or_current_method_reg);
      __ SetReorder(reordering);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // Load the class root from the JIT roots table via a patched lui + root load.
      CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
                                                                             cls->GetTypeIndex(),
                                                                             cls->GetClass());
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      __ SetReorder(reordering);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info->low_label);
      break;
    }
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
7808
7809static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07007810 return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007811}
7812
7813void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
7814 LocationSummary* locations =
7815 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
7816 locations->SetOut(Location::RequiresRegister());
7817}
7818
7819void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
7820 Register out = load->GetLocations()->Out().AsRegister<Register>();
7821 __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
7822}
7823
// Clearing the exception is a plain TLS store; no inputs, outputs, or temps are needed.
void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
7827
// Clear the pending exception by storing null (ZERO) into the Thread's exception slot.
void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
7831
// Sets up register locations for HLoadString depending on its load kind and
// read-barrier configuration (mirrors VisitLoadClass above).
void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      if (isR6) {
        break;  // R6 addresses PC-relatively without a base register input.
      }
      FALLTHROUGH_INTENDED;
    // We need an extra register for PC-relative dex cache accesses.
    case HLoadString::LoadKind::kRuntimeCall:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    // The runtime call returns the string in the first calling-convention register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path.
        locations->AddTemp(Location::RequiresRegister());
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
7873
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007874// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7875// move.
7876void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007877 HLoadString::LoadKind load_kind = load->GetLoadKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007878 LocationSummary* locations = load->GetLocations();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007879 Location out_loc = locations->Out();
7880 Register out = out_loc.AsRegister<Register>();
7881 Register base_or_current_method_reg;
7882 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7883 switch (load_kind) {
7884 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007885 case HLoadString::LoadKind::kBootImageAddress:
7886 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007887 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007888 base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
7889 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007890 default:
7891 base_or_current_method_reg = ZERO;
7892 break;
7893 }
7894
7895 switch (load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007896 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markoaad75c62016-10-03 08:46:48 +00007897 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007898 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007899 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007900 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7901 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007902 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007903 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7904 out,
7905 base_or_current_method_reg,
7906 info_low);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007907 __ Addiu(out, out, /* placeholder */ 0x5678);
7908 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007909 return; // No dex cache slow path.
7910 }
7911 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007912 uint32_t address = dchecked_integral_cast<uint32_t>(
7913 reinterpret_cast<uintptr_t>(load->GetString().Get()));
7914 DCHECK_NE(address, 0u);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007915 __ LoadLiteral(out,
7916 base_or_current_method_reg,
7917 codegen_->DeduplicateBootImageAddressLiteral(address));
7918 return; // No dex cache slow path.
7919 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00007920 case HLoadString::LoadKind::kBssEntry: {
7921 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007922 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007923 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007924 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7925 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007926 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007927 Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
7928 bool reordering = __ SetReorder(false);
7929 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7930 temp,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007931 base_or_current_method_reg);
7932 __ SetReorder(reordering);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007933 GenerateGcRootFieldLoad(load,
7934 out_loc,
7935 temp,
7936 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007937 kCompilerReadBarrierOption,
7938 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007939 SlowPathCodeMIPS* slow_path =
7940 new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load, info_high);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007941 codegen_->AddSlowPath(slow_path);
7942 __ Beqz(out, slow_path->GetEntryLabel());
7943 __ Bind(slow_path->GetExitLabel());
7944 return;
7945 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08007946 case HLoadString::LoadKind::kJitTableAddress: {
7947 CodeGeneratorMIPS::JitPatchInfo* info =
7948 codegen_->NewJitRootStringPatch(load->GetDexFile(),
7949 load->GetStringIndex(),
7950 load->GetString());
7951 bool reordering = __ SetReorder(false);
7952 __ Bind(&info->high_label);
7953 __ Lui(out, /* placeholder */ 0x1234);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007954 __ SetReorder(reordering);
Alexey Frunze15958152017-02-09 19:08:30 -08007955 GenerateGcRootFieldLoad(load,
7956 out_loc,
7957 out,
7958 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007959 kCompilerReadBarrierOption,
7960 &info->low_label);
Alexey Frunze627c1a02017-01-30 19:28:14 -08007961 return;
7962 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007963 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007964 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007965 }
Nicolas Geoffray917d0162015-11-24 18:25:35 +00007966
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007967 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007968 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007969 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007970 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007971 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007972 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
7973 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007974}
7975
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007976void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
7977 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7978 locations->SetOut(Location::ConstantLocation(constant));
7979}
7980
// No code is emitted for a long constant; it is materialized at each use site.
void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
7984
7985void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7986 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007987 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007988 InvokeRuntimeCallingConvention calling_convention;
7989 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7990}
7991
7992void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7993 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01007994 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007995 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7996 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007997 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007998 }
7999 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
8000}
8001
8002void LocationsBuilderMIPS::VisitMul(HMul* mul) {
8003 LocationSummary* locations =
8004 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
8005 switch (mul->GetResultType()) {
8006 case Primitive::kPrimInt:
8007 case Primitive::kPrimLong:
8008 locations->SetInAt(0, Location::RequiresRegister());
8009 locations->SetInAt(1, Location::RequiresRegister());
8010 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8011 break;
8012
8013 case Primitive::kPrimFloat:
8014 case Primitive::kPrimDouble:
8015 locations->SetInAt(0, Location::RequiresFpuRegister());
8016 locations->SetInAt(1, Location::RequiresFpuRegister());
8017 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8018 break;
8019
8020 default:
8021 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
8022 }
8023}
8024
// Emits the multiply for int (single MUL), long (32x32 partial-product
// sequence) or float/double (single FPU multiply), choosing R2 vs. R6
// encodings from the instruction-set features.
void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();

      if (isR6) {
        __ MulR6(dst, lhs, rhs);
      } else {
        __ MulR2(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existance of A1_A2.
      // The algorithm is wrong if dst_high is either lhs_lo or rhs_lo:
      // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2).
      // dst_high is written before lhs_low/rhs_low are last read, so it must
      // not alias either of them.
      DCHECK_NE(dst_high, lhs_low);
      DCHECK_NE(dst_high, rhs_low);

      // A_B * C_D
      // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
      // dst_lo: [ low(B*D) ]
      // Note: R2 and R6 MUL produce the low 32 bit of the multiplication result.

      if (isR6) {
        __ MulR6(TMP, lhs_high, rhs_low);
        __ MulR6(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        // MUHU yields the high 32 bits of the unsigned low-word product.
        __ MuhuR6(TMP, lhs_low, rhs_low);
        __ Addu(dst_high, dst_high, TMP);
        __ MulR6(dst_low, lhs_low, rhs_low);
      } else {
        __ MulR2(TMP, lhs_high, rhs_low);
        __ MulR2(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        // R2 has no MUHU: use MULTU and read HI/LO explicitly.
        __ MultuR2(lhs_low, rhs_low);
        __ Mfhi(TMP);
        __ Addu(dst_high, dst_high, TMP);
        __ Mflo(dst_low);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ MulS(dst, lhs, rhs);
      } else {
        __ MulD(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected mul type " << type;
  }
}
8096
8097void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
8098 LocationSummary* locations =
8099 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
8100 switch (neg->GetResultType()) {
8101 case Primitive::kPrimInt:
8102 case Primitive::kPrimLong:
8103 locations->SetInAt(0, Location::RequiresRegister());
8104 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8105 break;
8106
8107 case Primitive::kPrimFloat:
8108 case Primitive::kPrimDouble:
8109 locations->SetInAt(0, Location::RequiresFpuRegister());
8110 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8111 break;
8112
8113 default:
8114 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
8115 }
8116}
8117
// Emits arithmetic negation: SUBU from ZERO for int, a borrow-propagating
// pair subtraction for long, and NEG.S/NEG.D for floating point.
void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Subu(dst, ZERO, src);
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      // Negate the low word; SLTU captures the borrow (1 iff low != 0),
      // which is then subtracted from the negated high word.
      __ Subu(dst_low, ZERO, src_low);
      __ Sltu(TMP, ZERO, dst_low);
      __ Subu(dst_high, ZERO, src_high);
      __ Subu(dst_high, dst_high, TMP);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ NegS(dst, src);
      } else {
        __ NegD(dst, src);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected neg type " << type;
  }
}
8155
// Array allocation is a runtime call: both inputs (class and length) go in
// the first two argument registers; the reference comes back in the
// conventional return location.
void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
8164
// Emits the runtime call that allocates the array; the entrypoint is
// selected from the array's class.
void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
8174
8175void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
8176 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008177 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008178 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00008179 if (instruction->IsStringAlloc()) {
8180 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
8181 } else {
8182 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00008183 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008184 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
8185}
8186
// Emits the allocation call. Strings are allocated via the StringFactory's
// NewEmptyString entry point, called indirectly through its code pointer;
// everything else uses the instruction's chosen quick entrypoint.
void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
    // Load the StringFactory method from the thread, then its code entry, and
    // make an indirect call through T9 (the MIPS call-register convention).
    __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ NopIfNoReordering();
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
8204
8205void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
8206 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8207 locations->SetInAt(0, Location::RequiresRegister());
8208 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8209}
8210
// Emits bitwise-not as NOR with ZERO (nor dst, src, $0 == ~src); long values
// negate both words of the register pair independently.
void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Nor(dst, src, ZERO);
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Nor(dst_high, src_high, ZERO);
      __ Nor(dst_low, src_low, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}
8237
// Boolean-not reads one core register and writes a non-overlapping one.
void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
8243
8244void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
8245 LocationSummary* locations = instruction->GetLocations();
8246 __ Xori(locations->Out().AsRegister<Register>(),
8247 locations->InAt(0).AsRegister<Register>(),
8248 1);
8249}
8250
8251void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01008252 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
8253 locations->SetInAt(0, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008254}
8255
Calin Juravle2ae48182016-03-16 14:05:09 +00008256void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
8257 if (CanMoveNullCheckToUser(instruction)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008258 return;
8259 }
8260 Location obj = instruction->GetLocations()->InAt(0);
8261
8262 __ Lw(ZERO, obj.AsRegister<Register>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00008263 RecordPcInfo(instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008264}
8265
Calin Juravle2ae48182016-03-16 14:05:09 +00008266void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008267 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00008268 AddSlowPath(slow_path);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008269
8270 Location obj = instruction->GetLocations()->InAt(0);
8271
8272 __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
8273}
8274
// Dispatches to the implicit or explicit null-check emitter on the codegen.
void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
8278
// Bitwise-or shares the common binary-op location logic.
void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
8282
// Bitwise-or shares the common binary-op code emission.
void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
8286
// Parallel moves are synthesized after location building, so the builder
// must never see one.
void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
8290
// Lets the move resolver emit the swap/move sequence for this parallel move.
void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
8294
8295void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
8296 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8297 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
8298 if (location.IsStackSlot()) {
8299 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
8300 } else if (location.IsDoubleStackSlot()) {
8301 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
8302 }
8303 locations->SetOut(location);
8304}
8305
// No code: the parameter already resides in its convention-assigned location.
void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
                                                       ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
8310
// The current ArtMethod* is pinned to the method register (A0 on MIPS).
void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
8316
// No code: the method pointer is already in its register.
void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
                                                      ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
8321
8322void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
8323 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01008324 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008325 locations->SetInAt(i, Location::Any());
8326 }
8327 locations->SetOut(Location::Any());
8328}
8329
// Phis never reach code generation; they are eliminated before this phase.
void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
8333
// Remainder: int is computed inline (no call); long and float/double go
// through runtime entrypoints and therefore use the runtime calling
// convention for their inputs and outputs.
void LocationsBuilderMIPS::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      // The divisor may be a constant (enables strength reduction in the
      // shared div/rem emitter).
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8370
// Emits remainder: int inline via the shared div/rem path; long via the
// lmod runtime call; float/double via fmodf/fmod runtime calls.
void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
      break;
    }
    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      break;
    }
    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmod, double, double, double>();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8397
Igor Murashkind01745e2017-04-05 16:40:31 -07008398void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
8399 constructor_fence->SetLocations(nullptr);
8400}
8401
// A constructor fence is a store-store barrier: final-field writes must be
// visible before the object reference is published.
void InstructionCodeGeneratorMIPS::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
8406
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008407void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
8408 memory_barrier->SetLocations(nullptr);
8409}
8410
// Emits the barrier of the kind requested by the instruction.
void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
8414
// The returned value must be in the MIPS return location for its type.
void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, MipsReturnLocation(return_type));
}
8420
// The value is already in the return register(s); just tear down the frame.
void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
8424
// A void return has no operands.
void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
8428
// Emits the frame teardown for a void return.
void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
8432
Alexey Frunze92d90602015-12-18 18:16:36 -08008433void LocationsBuilderMIPS::VisitRor(HRor* ror) {
8434 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00008435}
8436
Alexey Frunze92d90602015-12-18 18:16:36 -08008437void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
8438 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00008439}
8440
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008441void LocationsBuilderMIPS::VisitShl(HShl* shl) {
8442 HandleShift(shl);
8443}
8444
// Shift-left shares the common shift code emission.
void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}
8448
// Arithmetic shift-right shares the common shift location logic.
void LocationsBuilderMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8452
// Arithmetic shift-right shares the common shift code emission.
void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8456
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008457void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
8458 HandleBinaryOp(instruction);
8459}
8460
// Subtraction shares the common binary-op code emission.
void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
8464
// Static field reads share the common field-load location logic.
void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
8468
// Static field reads share the common field-load emission, with the dex pc
// forwarded for stack maps.
void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
8472
// Static field writes share the common field-store location logic.
void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
8476
// Static field writes share the common field-store emission, forwarding the
// dex pc and whether the stored value can be null.
void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
8483
// Unresolved field accesses go through the runtime, so locations follow the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8491
// Emits the generic runtime access for an unresolved instance-field read.
void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8501
// Unresolved field accesses go through the runtime, so locations follow the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8509
// Emits the generic runtime access for an unresolved instance-field write.
void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8519
// Unresolved field accesses go through the runtime, so locations follow the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8527
// Emits the generic runtime access for an unresolved static-field read.
void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8537
// Unresolved field accesses go through the runtime, so locations follow the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8545
// Emits the generic runtime access for an unresolved static-field write.
void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8555
// A suspend check only runs code on its slow path.
void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
8565
// Emits a suspend check unless it will be folded into the surrounding
// control flow (a loop back edge or the entry block's goto).
void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
8579
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008580void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
8581 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008582 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008583 InvokeRuntimeCallingConvention calling_convention;
8584 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
8585}
8586
// Emits the call to the exception-delivery runtime entry point.
void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
8591
// Chooses locations for a primitive type conversion. Most conversions are
// emitted inline; on pre-R6 hardware, long<->floating-point conversions go
// through runtime entrypoints and therefore need fixed calling-convention
// registers instead of allocator-chosen ones.
void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // Reference and void types can never appear in a type conversion.
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  // Pre-R6 long<->FP conversions are done via runtime calls (kQuickL2f/L2d/F2l/D2l).
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  if (!isR6 &&
      ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
       (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
    call_kind = LocationSummary::kCallOnMainOnly;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  if (call_kind == LocationSummary::kNoCall) {
    // Inline conversion: let the register allocator pick CPU/FPU registers.
    if (Primitive::IsFloatingPointType(input_type)) {
      locations->SetInAt(0, Location::RequiresFpuRegister());
    } else {
      locations->SetInAt(0, Location::RequiresRegister());
    }

    if (Primitive::IsFloatingPointType(result_type)) {
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
    } else {
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
    }
  } else {
    // Runtime call: input and output must be in the fixed argument/return
    // locations of the runtime calling convention.
    InvokeRuntimeCallingConvention calling_convention;

    if (Primitive::IsFloatingPointType(input_type)) {
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
    } else {
      DCHECK_EQ(input_type, Primitive::kPrimLong);
      // A 64-bit value occupies a pair of 32-bit argument registers.
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
    }

    locations->SetOut(calling_convention.GetReturnLocation(result_type));
  }
}
8638
// Emits code for a primitive type conversion. The cases, in order:
//   integral -> long (sign-extend into a register pair),
//   integral -> integral narrowing/widening,
//   integral -> FP (inline on R6, runtime call on pre-R6 for long inputs),
//   FP -> integral (truncate with NaN/range handling; runtime call on pre-R6
//     for long results),
//   FP -> FP (single<->double).
void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // R2+ has seb/seh for byte/short sign extension; older revs need shifts.
  bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  DCHECK_NE(input_type, result_type);

  if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
    // int -> long: low word is the value, high word is the sign extension.
    Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
    Register dst_low = locations->Out().AsRegisterPairLow<Register>();
    Register src = locations->InAt(0).AsRegister<Register>();

    if (dst_low != src) {
      __ Move(dst_low, src);
    }
    __ Sra(dst_high, src, 31);
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    Register dst = locations->Out().AsRegister<Register>();
    // Narrowing from long only looks at the low word.
    Register src = (input_type == Primitive::kPrimLong)
        ? locations->InAt(0).AsRegisterPairLow<Register>()
        : locations->InAt(0).AsRegister<Register>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (has_sign_extension) {
          __ Seb(dst, src);
        } else {
          // Sign-extend 8 bits via shift left/arithmetic shift right.
          __ Sll(dst, src, 24);
          __ Sra(dst, dst, 24);
        }
        break;
      case Primitive::kPrimShort:
        if (has_sign_extension) {
          __ Seh(dst, src);
        } else {
          // Sign-extend 16 bits via shift left/arithmetic shift right.
          __ Sll(dst, src, 16);
          __ Sra(dst, dst, 16);
        }
        break;
      case Primitive::kPrimInt:
        // long -> int: just take the low word.
        if (dst != src) {
          __ Move(dst, src);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    if (input_type == Primitive::kPrimLong) {
      if (isR6) {
        // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
        Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
        FRegister dst = locations->Out().AsFpuRegister<FRegister>();
        __ Mtc1(src_low, FTMP);
        __ Mthc1(src_high, FTMP);
        if (result_type == Primitive::kPrimFloat) {
          __ Cvtsl(dst, FTMP);
        } else {
          __ Cvtdl(dst, FTMP);
        }
      } else {
        // Pre-R6: long -> float/double via runtime entrypoint.
        QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
                                                                                : kQuickL2d;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (result_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
        } else {
          CheckEntrypointTypes<kQuickL2d, double, int64_t>();
        }
      }
    } else {
      // int -> float/double: move to an FPU register and convert.
      Register src = locations->InAt(0).AsRegister<Register>();
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);

    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**63 for truncating to long, -2**31 for truncating to int).
    // If the input is greater than or equal to the minimum, it proceeds to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    if (result_type == Primitive::kPrimLong) {
      if (isR6) {
        // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
        Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
        Register dst_low = locations->Out().AsRegisterPairLow<Register>();

        if (input_type == Primitive::kPrimFloat) {
          __ TruncLS(FTMP, src);
        } else {
          __ TruncLD(FTMP, src);
        }
        __ Mfc1(dst_low, FTMP);
        __ Mfhc1(dst_high, FTMP);
      } else {
        // Pre-R6: float/double -> long via runtime entrypoint.
        QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
                                                                               : kQuickD2l;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (input_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
        } else {
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
        }
      }
    } else {
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      Register dst = locations->Out().AsRegister<Register>();
      MipsLabel truncate;
      MipsLabel done;

      if (!isR6) {
        // Pre-R6 prologue: load INT32_MIN (as float or double) into FTMP for
        // the range comparison described in the big comment above.
        if (input_type == Primitive::kPrimFloat) {
          uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, min_val);
          __ Mtc1(TMP, FTMP);
        } else {
          uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, High32Bits(min_val));
          __ Mtc1(ZERO, FTMP);
          __ MoveToFpuHigh(TMP, FTMP);
        }

        // If INT32_MIN <= src, the truncate instruction handles it correctly.
        if (input_type == Primitive::kPrimFloat) {
          __ ColeS(0, FTMP, src);
        } else {
          __ ColeD(0, FTMP, src);
        }
        __ Bc1t(0, &truncate);

        // Otherwise: NaN -> 0, too-small -> INT32_MIN.
        if (input_type == Primitive::kPrimFloat) {
          __ CeqS(0, src, src);
        } else {
          __ CeqD(0, src, src);
        }
        __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
        __ Movf(dst, ZERO, 0);

        __ B(&done);

        __ Bind(&truncate);
      }

      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);

      if (!isR6) {
        __ Bind(&done);
      }
    }
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float <-> double conversion.
    FRegister dst = locations->Out().AsFpuRegister<FRegister>();
    FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
8835
// Unsigned shift right: both phases are handled by the shared shift helper.
void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

// Bitwise XOR: both phases are handled by the shared binary-op helper.
void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

// HBoundType never survives to code generation; it is stripped earlier.
void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
8861
// All condition instructions (signed: Equal..GreaterThanOrEqual; unsigned:
// Below..AboveOrEqual) share one implementation, HandleCondition(), for both
// the locations phase and the code-generation phase.
void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
8941
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008942void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8943 LocationSummary* locations =
8944 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
8945 locations->SetInAt(0, Location::RequiresRegister());
8946}
8947
// Lowers a packed switch as a chain of compare-and-branch instructions.
// The value is first biased by -lower_bound into TMP, then repeatedly
// decremented so each case needs only a compare against zero; two cases are
// handled per loop iteration.
void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  Register temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ B(codegen_->GetLabelOf(default_block));
  }
}
8983
// Lowers a packed switch via a jump table: range-check the biased value, load
// a PC-relative offset from the table, and jump to the absolute target.
// constant_area is ZERO on R6 (PC-relative addressing) or the constant-area
// base register on R2.
void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
                                                             Register constant_area,
                                                             int32_t lower_bound,
                                                             uint32_t num_entries,
                                                             HBasicBlock* switch_block,
                                                             HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<MipsLabel*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  if (IsInt<16>(static_cast<int32_t>(num_entries))) {
    // Small table: sltiu takes a 16-bit immediate.
    __ Sltiu(AT, TMP, num_entries);
    __ Beqz(AT, codegen_->GetLabelOf(default_block));
  } else {
    __ LoadConst32(AT, num_entries);
    __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
  }

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, constant_area, table->GetLabel());
  // TMP = table_base + value * 4 (each table entry is a 32-bit offset).
  __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Addu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ NopIfNoReordering();
}
9020
9021void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9022 int32_t lower_bound = switch_instr->GetStartValue();
9023 uint32_t num_entries = switch_instr->GetNumEntries();
9024 LocationSummary* locations = switch_instr->GetLocations();
9025 Register value_reg = locations->InAt(0).AsRegister<Register>();
9026 HBasicBlock* switch_block = switch_instr->GetBlock();
9027 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9028
9029 if (codegen_->GetInstructionSetFeatures().IsR6() &&
9030 num_entries > kPackedSwitchJumpTableThreshold) {
9031 // R6 uses PC-relative addressing to access the jump table.
9032 // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
9033 // the jump table and it is implemented by changing HPackedSwitch to
9034 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
9035 // See VisitMipsPackedSwitch() for the table-based implementation on R2.
9036 GenTableBasedPackedSwitch(value_reg,
9037 ZERO,
9038 lower_bound,
9039 num_entries,
9040 switch_block,
9041 default_block);
9042 } else {
9043 GenPackedSwitchWithCompares(value_reg,
9044 lower_bound,
9045 num_entries,
9046 switch_block,
9047 default_block);
9048 }
9049}
9050
9051void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
9052 LocationSummary* locations =
9053 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
9054 locations->SetInAt(0, Location::RequiresRegister());
9055 // Constant area pointer (HMipsComputeBaseMethodAddress).
9056 locations->SetInAt(1, Location::RequiresRegister());
9057}
9058
9059void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
9060 int32_t lower_bound = switch_instr->GetStartValue();
9061 uint32_t num_entries = switch_instr->GetNumEntries();
9062 LocationSummary* locations = switch_instr->GetLocations();
9063 Register value_reg = locations->InAt(0).AsRegister<Register>();
9064 Register constant_area = locations->InAt(1).AsRegister<Register>();
9065 HBasicBlock* switch_block = switch_instr->GetBlock();
9066 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9067
9068 // This is an R2-only path. HPackedSwitch has been changed to
9069 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
9070 // required to address the jump table relative to PC.
9071 GenTableBasedPackedSwitch(value_reg,
9072 constant_area,
9073 lower_bound,
9074 num_entries,
9075 switch_block,
9076 default_block);
9077}
9078
// Locations for the base-address computation: produces one core register
// holding the obtained PC value; no inputs, no runtime call.
void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
9085
// Materializes the current PC into a register using the NAL (branch-and-link
// without branching) trick; the resulting value anchors constant-area
// addressing on pre-R6 (R6 addresses the constant area PC-relatively instead).
void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations = insn->GetLocations();
  Register reg = locations->Out().AsRegister<Register>();

  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());

  // Generate a dummy PC-relative call to obtain PC.
  __ Nal();
  // Grab the return address off RA.
  __ Move(reg, RA);

  // Remember this offset (the obtained PC value) for later use with constant area.
  __ BindPcRelBaseLabel();
}
9101
void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
9108
// Emits the runtime trampoline call that resolves and invokes the method.
void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
9112
// Locations for HClassTableGet: input is the Class object, output is the
// loaded ArtMethod*; both in core registers, no runtime call.
void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
9119
// Loads a method pointer out of a class's dispatch table. For the vtable the
// entry is embedded in the Class object (single load); for the IMT the Class
// holds a pointer to the table, so two loads are needed.
void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable: load directly from the embedded vtable entry in the Class.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMipsPointerSize).SizeValue();
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      method_offset);
  } else {
    // IMT: first load the table pointer from the Class, then index into it.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMipsPointerSize));
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->Out().AsRegister<Register>(),
                      method_offset);
  }
}
9142
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02009143#undef __
9144#undef QUICK_ENTRY_POINT
9145
9146} // namespace mips
9147} // namespace art