blob: d8ac99a9a6002c3ee78592c16498f98f4b6f60ef [file] [log] [blame]
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips.h"
18
19#include "arch/mips/entrypoints_direct_mips.h"
20#include "arch/mips/instruction_set_features_mips.h"
21#include "art_method.h"
Chris Larsen701566a2015-10-27 15:29:13 -070022#include "code_generator_utils.h"
Vladimir Marko3a21e382016-09-02 12:38:38 +010023#include "compiled_method.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
27#include "intrinsics.h"
Chris Larsen701566a2015-10-27 15:29:13 -070028#include "intrinsics_mips.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020029#include "mirror/array-inl.h"
30#include "mirror/class-inl.h"
31#include "offsets.h"
32#include "thread.h"
33#include "utils/assembler.h"
34#include "utils/mips/assembler_mips.h"
35#include "utils/stack_checks.h"
36
37namespace art {
38namespace mips {
39
40static constexpr int kCurrentMethodStackOffset = 0;
41static constexpr Register kMethodRegisterArgument = A0;
42
Alexey Frunzee3fb2452016-05-10 16:08:05 -070043// We'll maximize the range of a single load instruction for dex cache array accesses
44// by aligning offset -32768 with the offset of the first used element.
45static constexpr uint32_t kDexCacheArrayLwOffset = 0x8000;
46
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020047Location MipsReturnLocation(Primitive::Type return_type) {
48 switch (return_type) {
49 case Primitive::kPrimBoolean:
50 case Primitive::kPrimByte:
51 case Primitive::kPrimChar:
52 case Primitive::kPrimShort:
53 case Primitive::kPrimInt:
54 case Primitive::kPrimNot:
55 return Location::RegisterLocation(V0);
56
57 case Primitive::kPrimLong:
58 return Location::RegisterPairLocation(V0, V1);
59
60 case Primitive::kPrimFloat:
61 case Primitive::kPrimDouble:
62 return Location::FpuRegisterLocation(F0);
63
64 case Primitive::kPrimVoid:
65 return Location();
66 }
67 UNREACHABLE();
68}
69
70Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
71 return MipsReturnLocation(type);
72}
73
74Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
75 return Location::RegisterLocation(kMethodRegisterArgument);
76}
77
78Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
79 Location next_location;
80
81 switch (type) {
82 case Primitive::kPrimBoolean:
83 case Primitive::kPrimByte:
84 case Primitive::kPrimChar:
85 case Primitive::kPrimShort:
86 case Primitive::kPrimInt:
87 case Primitive::kPrimNot: {
88 uint32_t gp_index = gp_index_++;
89 if (gp_index < calling_convention.GetNumberOfRegisters()) {
90 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
91 } else {
92 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
93 next_location = Location::StackSlot(stack_offset);
94 }
95 break;
96 }
97
98 case Primitive::kPrimLong: {
99 uint32_t gp_index = gp_index_;
100 gp_index_ += 2;
101 if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800102 Register reg = calling_convention.GetRegisterAt(gp_index);
103 if (reg == A1 || reg == A3) {
104 gp_index_++; // Skip A1(A3), and use A2_A3(T0_T1) instead.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200105 gp_index++;
106 }
107 Register low_even = calling_convention.GetRegisterAt(gp_index);
108 Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
109 DCHECK_EQ(low_even + 1, high_odd);
110 next_location = Location::RegisterPairLocation(low_even, high_odd);
111 } else {
112 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
113 next_location = Location::DoubleStackSlot(stack_offset);
114 }
115 break;
116 }
117
118 // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
119 // will take up the even/odd pair, while floats are stored in even regs only.
120 // On 64 bit FPU, both double and float are stored in even registers only.
121 case Primitive::kPrimFloat:
122 case Primitive::kPrimDouble: {
123 uint32_t float_index = float_index_++;
124 if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
125 next_location = Location::FpuRegisterLocation(
126 calling_convention.GetFpuRegisterAt(float_index));
127 } else {
128 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
129 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
130 : Location::StackSlot(stack_offset);
131 }
132 break;
133 }
134
135 case Primitive::kPrimVoid:
136 LOG(FATAL) << "Unexpected parameter type " << type;
137 break;
138 }
139
140 // Space on the stack is reserved for all arguments.
141 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
142
143 return next_location;
144}
145
146Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
147 return MipsReturnLocation(type);
148}
149
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100150// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
151#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700152#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200153
154class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
155 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000156 explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200157
158 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
159 LocationSummary* locations = instruction_->GetLocations();
160 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
161 __ Bind(GetEntryLabel());
162 if (instruction_->CanThrowIntoCatchBlock()) {
163 // Live registers will be restored in the catch block if caught.
164 SaveLiveRegisters(codegen, instruction_->GetLocations());
165 }
166 // We're moving two locations to locations that could overlap, so we need a parallel
167 // move resolver.
168 InvokeRuntimeCallingConvention calling_convention;
169 codegen->EmitParallelMoves(locations->InAt(0),
170 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
171 Primitive::kPrimInt,
172 locations->InAt(1),
173 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
174 Primitive::kPrimInt);
Serban Constantinescufca16662016-07-14 09:21:59 +0100175 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
176 ? kQuickThrowStringBounds
177 : kQuickThrowArrayBounds;
178 mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100179 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200180 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
181 }
182
183 bool IsFatal() const OVERRIDE { return true; }
184
185 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }
186
187 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200188 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
189};
190
191class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
192 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000193 explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200194
195 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
196 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
197 __ Bind(GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +0100198 mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200199 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
200 }
201
202 bool IsFatal() const OVERRIDE { return true; }
203
204 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }
205
206 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200207 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
208};
209
210class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
211 public:
212 LoadClassSlowPathMIPS(HLoadClass* cls,
213 HInstruction* at,
214 uint32_t dex_pc,
215 bool do_clinit)
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000216 : SlowPathCodeMIPS(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200217 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
218 }
219
220 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000221 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700222 Location out = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200223 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700224 const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
225 const bool r2_baker_or_no_read_barriers = !isR6 && (!kUseReadBarrier || kUseBakerReadBarrier);
226 InvokeRuntimeCallingConvention calling_convention;
227 DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
228 const bool is_load_class_bss_entry =
229 (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200230 __ Bind(GetEntryLabel());
231 SaveLiveRegisters(codegen, locations);
232
Alexey Frunzec61c0762017-04-10 13:54:23 -0700233 // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
234 Register entry_address = kNoRegister;
235 if (is_load_class_bss_entry && r2_baker_or_no_read_barriers) {
236 Register temp = locations->GetTemp(0).AsRegister<Register>();
237 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
238 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
239 // kSaveEverything call.
240 entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
241 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
242 if (temp_is_a0) {
243 __ Move(entry_address, temp);
244 }
245 }
246
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000247 dex::TypeIndex type_index = cls_->GetTypeIndex();
248 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
Serban Constantinescufca16662016-07-14 09:21:59 +0100249 QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
250 : kQuickInitializeType;
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000251 mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200252 if (do_clinit_) {
253 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
254 } else {
255 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
256 }
257
Alexey Frunzec61c0762017-04-10 13:54:23 -0700258 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
259 if (is_load_class_bss_entry && r2_baker_or_no_read_barriers) {
260 // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
261 __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(0), entry_address, 0);
262 }
263
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200264 // Move the class to the desired location.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200265 if (out.IsValid()) {
266 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000267 Primitive::Type type = instruction_->GetType();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700268 mips_codegen->MoveLocation(out,
269 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
270 type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200271 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200272 RestoreLiveRegisters(codegen, locations);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700273
274 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
275 if (is_load_class_bss_entry && !r2_baker_or_no_read_barriers) {
276 // For non-Baker read barriers (or on R6), we need to re-calculate the address of
277 // the class entry.
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000278 Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000279 CodeGeneratorMIPS::PcRelativePatchInfo* info =
Vladimir Marko1998cd02017-01-13 13:02:58 +0000280 mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
Alexey Frunze6b892cd2017-01-03 17:11:38 -0800281 bool reordering = __ SetReorder(false);
282 mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info, TMP, base);
283 __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
284 __ SetReorder(reordering);
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000285 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200286 __ B(GetExitLabel());
287 }
288
289 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }
290
291 private:
292 // The class this slow path will load.
293 HLoadClass* const cls_;
294
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200295 // The dex PC of `at_`.
296 const uint32_t dex_pc_;
297
298 // Whether to initialize the class.
299 const bool do_clinit_;
300
301 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
302};
303
304class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
305 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000306 explicit LoadStringSlowPathMIPS(HLoadString* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200307
308 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700309 DCHECK(instruction_->IsLoadString());
310 DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200311 LocationSummary* locations = instruction_->GetLocations();
312 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Alexey Frunzec61c0762017-04-10 13:54:23 -0700313 HLoadString* load = instruction_->AsLoadString();
314 const dex::StringIndex string_index = load->GetStringIndex();
315 Register out = locations->Out().AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200316 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700317 const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
318 const bool r2_baker_or_no_read_barriers = !isR6 && (!kUseReadBarrier || kUseBakerReadBarrier);
319 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200320 __ Bind(GetEntryLabel());
321 SaveLiveRegisters(codegen, locations);
322
Alexey Frunzec61c0762017-04-10 13:54:23 -0700323 // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
324 Register entry_address = kNoRegister;
325 if (r2_baker_or_no_read_barriers) {
326 Register temp = locations->GetTemp(0).AsRegister<Register>();
327 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
328 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
329 // kSaveEverything call.
330 entry_address = temp_is_a0 ? out : temp;
331 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
332 if (temp_is_a0) {
333 __ Move(entry_address, temp);
334 }
335 }
336
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000337 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
Serban Constantinescufca16662016-07-14 09:21:59 +0100338 mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200339 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700340
341 // Store the resolved string to the BSS entry.
342 if (r2_baker_or_no_read_barriers) {
343 // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
344 __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(0), entry_address, 0);
345 }
346
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200347 Primitive::Type type = instruction_->GetType();
348 mips_codegen->MoveLocation(locations->Out(),
Alexey Frunzec61c0762017-04-10 13:54:23 -0700349 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200350 type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200351 RestoreLiveRegisters(codegen, locations);
Vladimir Markoaad75c62016-10-03 08:46:48 +0000352
Alexey Frunzec61c0762017-04-10 13:54:23 -0700353 // Store the resolved string to the BSS entry.
354 if (!r2_baker_or_no_read_barriers) {
355 // For non-Baker read barriers (or on R6), we need to re-calculate the address of
356 // the string entry.
357 Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
358 CodeGeneratorMIPS::PcRelativePatchInfo* info =
359 mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
360 bool reordering = __ SetReorder(false);
361 mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info, TMP, base);
362 __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
363 __ SetReorder(reordering);
364 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200365 __ B(GetExitLabel());
366 }
367
368 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }
369
370 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200371 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
372};
373
374class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
375 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000376 explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200377
378 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
379 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
380 __ Bind(GetEntryLabel());
381 if (instruction_->CanThrowIntoCatchBlock()) {
382 // Live registers will be restored in the catch block if caught.
383 SaveLiveRegisters(codegen, instruction_->GetLocations());
384 }
Serban Constantinescufca16662016-07-14 09:21:59 +0100385 mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200386 instruction_,
387 instruction_->GetDexPc(),
Serban Constantinescufca16662016-07-14 09:21:59 +0100388 this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200389 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
390 }
391
392 bool IsFatal() const OVERRIDE { return true; }
393
394 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }
395
396 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200397 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
398};
399
400class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
401 public:
402 SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000403 : SlowPathCodeMIPS(instruction), successor_(successor) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200404
405 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
406 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
407 __ Bind(GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +0100408 mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200409 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200410 if (successor_ == nullptr) {
411 __ B(GetReturnLabel());
412 } else {
413 __ B(mips_codegen->GetLabelOf(successor_));
414 }
415 }
416
417 MipsLabel* GetReturnLabel() {
418 DCHECK(successor_ == nullptr);
419 return &return_label_;
420 }
421
422 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }
423
424 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200425 // If not null, the block to branch to after the suspend check.
426 HBasicBlock* const successor_;
427
428 // If `successor_` is null, the label to branch to after the suspend check.
429 MipsLabel return_label_;
430
431 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
432};
433
434class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
435 public:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800436 explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
437 : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200438
439 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
440 LocationSummary* locations = instruction_->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200441 uint32_t dex_pc = instruction_->GetDexPc();
442 DCHECK(instruction_->IsCheckCast()
443 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
444 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
445
446 __ Bind(GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800447 if (!is_fatal_) {
448 SaveLiveRegisters(codegen, locations);
449 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200450
451 // We're moving two locations to locations that could overlap, so we need a parallel
452 // move resolver.
453 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800454 codegen->EmitParallelMoves(locations->InAt(0),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200455 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
456 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800457 locations->InAt(1),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200458 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
459 Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200460 if (instruction_->IsInstanceOf()) {
Serban Constantinescufca16662016-07-14 09:21:59 +0100461 mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800462 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200463 Primitive::Type ret_type = instruction_->GetType();
464 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
465 mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200466 } else {
467 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800468 mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
469 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200470 }
471
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800472 if (!is_fatal_) {
473 RestoreLiveRegisters(codegen, locations);
474 __ B(GetExitLabel());
475 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200476 }
477
478 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }
479
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800480 bool IsFatal() const OVERRIDE { return is_fatal_; }
481
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200482 private:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800483 const bool is_fatal_;
484
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200485 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
486};
487
488class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
489 public:
Aart Bik42249c32016-01-07 15:33:50 -0800490 explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000491 : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200492
493 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800494 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200495 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100496 LocationSummary* locations = instruction_->GetLocations();
497 SaveLiveRegisters(codegen, locations);
498 InvokeRuntimeCallingConvention calling_convention;
499 __ LoadConst32(calling_convention.GetRegisterAt(0),
500 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufca16662016-07-14 09:21:59 +0100501 mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100502 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200503 }
504
505 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }
506
507 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200508 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
509};
510
Alexey Frunze15958152017-02-09 19:08:30 -0800511class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
512 public:
513 explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}
514
515 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
516 LocationSummary* locations = instruction_->GetLocations();
517 __ Bind(GetEntryLabel());
518 SaveLiveRegisters(codegen, locations);
519
520 InvokeRuntimeCallingConvention calling_convention;
521 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
522 parallel_move.AddMove(
523 locations->InAt(0),
524 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
525 Primitive::kPrimNot,
526 nullptr);
527 parallel_move.AddMove(
528 locations->InAt(1),
529 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
530 Primitive::kPrimInt,
531 nullptr);
532 parallel_move.AddMove(
533 locations->InAt(2),
534 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
535 Primitive::kPrimNot,
536 nullptr);
537 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
538
539 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
540 mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
541 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
542 RestoreLiveRegisters(codegen, locations);
543 __ B(GetExitLabel());
544 }
545
546 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }
547
548 private:
549 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
550};
551
552// Slow path marking an object reference `ref` during a read
553// barrier. The field `obj.field` in the object `obj` holding this
554// reference does not get updated by this slow path after marking (see
555// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
556//
557// This means that after the execution of this slow path, `ref` will
558// always be up-to-date, but `obj.field` may not; i.e., after the
559// flip, `ref` will be a to-space reference, but `obj.field` will
560// probably still be a from-space reference (unless it gets updated by
561// another thread, or if another thread installed another object
562// reference (different from `ref`) in `obj.field`).
563//
564// If `entrypoint` is a valid location it is assumed to already be
565// holding the entrypoint. The case where the entrypoint is passed in
566// is for the GcRoot read barrier.
567class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
568 public:
569 ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
570 Location ref,
571 Location entrypoint = Location::NoLocation())
572 : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
573 DCHECK(kEmitCompilerReadBarrier);
574 }
575
576 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
577
578 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
579 LocationSummary* locations = instruction_->GetLocations();
580 Register ref_reg = ref_.AsRegister<Register>();
581 DCHECK(locations->CanCall());
582 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
583 DCHECK(instruction_->IsInstanceFieldGet() ||
584 instruction_->IsStaticFieldGet() ||
585 instruction_->IsArrayGet() ||
586 instruction_->IsArraySet() ||
587 instruction_->IsLoadClass() ||
588 instruction_->IsLoadString() ||
589 instruction_->IsInstanceOf() ||
590 instruction_->IsCheckCast() ||
591 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
592 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
593 << "Unexpected instruction in read barrier marking slow path: "
594 << instruction_->DebugName();
595
596 __ Bind(GetEntryLabel());
597 // No need to save live registers; it's taken care of by the
598 // entrypoint. Also, there is no need to update the stack mask,
599 // as this runtime call will not trigger a garbage collection.
600 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
601 DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
602 (S2 <= ref_reg && ref_reg <= S7) ||
603 (ref_reg == FP)) << ref_reg;
604 // "Compact" slow path, saving two moves.
605 //
606 // Instead of using the standard runtime calling convention (input
607 // and output in A0 and V0 respectively):
608 //
609 // A0 <- ref
610 // V0 <- ReadBarrierMark(A0)
611 // ref <- V0
612 //
613 // we just use rX (the register containing `ref`) as input and output
614 // of a dedicated entrypoint:
615 //
616 // rX <- ReadBarrierMarkRegX(rX)
617 //
618 if (entrypoint_.IsValid()) {
619 mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
620 DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
621 __ Jalr(entrypoint_.AsRegister<Register>());
622 __ NopIfNoReordering();
623 } else {
624 int32_t entry_point_offset =
625 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
626 // This runtime call does not require a stack map.
627 mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
628 instruction_,
629 this,
630 /* direct */ false);
631 }
632 __ B(GetExitLabel());
633 }
634
635 private:
636 // The location (register) of the marked object reference.
637 const Location ref_;
638
639 // The location of the entrypoint if already loaded.
640 const Location entrypoint_;
641
642 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
643};
644
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `ref` must live in one of the registers covered by the read barrier
  // marking entrypoints (checked in EmitNativeCode); `field_offset` is a
  // register pair of which only the low half is meaningful; `temp1` is a
  // scratch register used to preserve the pre-marking reference.
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
                                            Location ref,
                                            Register obj,
                                            Location field_offset,
                                            Register temp1)
      : SlowPathCodeMIPS(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegisterPair()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                      instruction_,
                                                      this,
                                                      /* direct */ false);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    MipsLabel done;
    __ Beq(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    Register base = obj_;
    // The UnsafeCASObject intrinsic uses a register pair as field
    // offset ("long offset"), of which only the low part contains
    // data.
    Register offset = field_offset_.AsRegisterPairLow<Register>();
    Register expected = temp1_;
    Register value = ref_reg;
    Register tmp_ptr = TMP;    // Pointer to actual memory.
    Register tmp = AT;         // Value in memory.

    __ Addu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    // Emit the LL/SC pair appropriate for the ISA revision (R6 encodes
    // ll/sc differently from R2).
    bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    MipsLabel loop_head, exit_loop;
    __ Bind(&loop_head);
    if (is_r6) {
      __ LlR6(tmp, tmp_ptr);
    } else {
      __ LlR2(tmp, tmp_ptr);
    }
    __ Bne(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    if (is_r6) {
      __ ScR6(tmp, tmp_ptr);
    } else {
      __ ScR2(tmp, tmp_ptr);
    }
    // Store-conditional writes 0 into `tmp` on failure; retry then.
    __ Beqz(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location (register pair, low half used) of the offset of the
  // marked reference field within `obj_`.
  const Location field_offset_;
  // Scratch register holding the pre-marking reference across the
  // entrypoint call (used as the CAS "expected" value).
  const Register temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
};
811
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `out` receives the to-space reference; `ref` is the reference that was
  // loaded; `obj` is the holder; the field address is `obj + offset` or,
  // when `index` is valid, `obj + offset + index * scale` (see EmitNativeCode).
  ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
                                          Location out,
                                          Location ref,
                                          Location obj,
                                          uint32_t offset,
                                          Location index)
      : SlowPathCodeMIPS(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips::MipsAssembler::Sll and
          // art::mips::MipsAssembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // With no index, the third argument is the constant `offset_`,
      // loaded after the parallel move so it cannot be clobbered by it.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips_codegen->Move32(out_, calling_convention.GetReturnLocation(Primitive::kPrimNot));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }

 private:
  // Returns a caller-save core register distinct from `ref_`, `obj_` and
  // any blocked register; aborts if none exists (cannot happen on MIPS).
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // Destination of the to-space reference produced by the runtime call.
  const Location out_;
  // The (from-space) reference that was loaded.
  const Location ref_;
  // The object holding the reference field.
  const Location obj_;
  // Static byte offset of the field within `obj_` (0 when `index_` is used
  // by the Unsafe intrinsics).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
};
997
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  // `root` is the GC root that was loaded; `out` receives the result of
  // the ReadBarrierForRootSlow runtime call.
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    // Only HLoadClass and HLoadString load GC roots through this path.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the root in A0 (first runtime argument), call the runtime,
    // then move the returned reference into `out_`.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips_codegen->Move32(out_, calling_convention.GetReturnLocation(Primitive::kPrimNot));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  // Destination of the marked root.
  const Location out_;
  // The GC root location to be processed by the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};
1040
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001041CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
1042 const MipsInstructionSetFeatures& isa_features,
1043 const CompilerOptions& compiler_options,
1044 OptimizingCompilerStats* stats)
1045 : CodeGenerator(graph,
1046 kNumberOfCoreRegisters,
1047 kNumberOfFRegisters,
1048 kNumberOfRegisterPairs,
1049 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
1050 arraysize(kCoreCalleeSaves)),
1051 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
1052 arraysize(kFpuCalleeSaves)),
1053 compiler_options,
1054 stats),
1055 block_labels_(nullptr),
1056 location_builder_(graph, this),
1057 instruction_visitor_(graph, this),
1058 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001059 assembler_(graph->GetArena(), &isa_features),
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001060 isa_features_(isa_features),
Alexey Frunze06a46c42016-07-19 15:00:40 -07001061 uint32_literals_(std::less<uint32_t>(),
1062 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexey Frunze06a46c42016-07-19 15:00:40 -07001063 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko65979462017-05-19 17:25:12 +01001064 pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexey Frunze06a46c42016-07-19 15:00:40 -07001065 pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko1998cd02017-01-13 13:02:58 +00001066 type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko65979462017-05-19 17:25:12 +01001067 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexey Frunze627c1a02017-01-30 19:28:14 -08001068 jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1069 jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexey Frunze06a46c42016-07-19 15:00:40 -07001070 clobbered_ra_(false) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001071 // Save RA (containing the return address) to mimic Quick.
1072 AddAllocatedRegister(Location::RegisterLocation(RA));
1073}
1074
1075#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001076// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1077#define __ down_cast<MipsAssembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001078#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001079
1080void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
1081 // Ensure that we fix up branches.
1082 __ FinalizeCode();
1083
1084 // Adjust native pc offsets in stack maps.
1085 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -08001086 uint32_t old_position =
1087 stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001088 uint32_t new_position = __ GetAdjustedPosition(old_position);
1089 DCHECK_GE(new_position, old_position);
1090 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
1091 }
1092
1093 // Adjust pc offsets for the disassembly information.
1094 if (disasm_info_ != nullptr) {
1095 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1096 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1097 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1098 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1099 it.second.start = __ GetAdjustedPosition(it.second.start);
1100 it.second.end = __ GetAdjustedPosition(it.second.end);
1101 }
1102 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1103 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1104 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1105 }
1106 }
1107
1108 CodeGenerator::Finalize(allocator);
1109}
1110
1111MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
1112 return codegen_->GetAssembler();
1113}
1114
1115void ParallelMoveResolverMIPS::EmitMove(size_t index) {
1116 DCHECK_LT(index, moves_.size());
1117 MoveOperands* move = moves_[index];
1118 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1119}
1120
// Emits code swapping the contents of two locations, dispatching on the
// location kinds (GPR, FPR, register pair, single/double stack slot).
// TMP, AT and FTMP serve as the scratch registers.
void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  Primitive::Type type = move->GetType();
  Location loc1 = move->GetDestination();
  Location loc2 = move->GetSource();

  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    // Swapping a location with itself is a no-op.
    return;
  }

  if (loc1.IsRegister() && loc2.IsRegister()) {
    // Swap 2 GPRs.
    Register r1 = loc1.AsRegister<Register>();
    Register r2 = loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
    // Swap 2 FPRs (single or double precision, by `type`).
    FRegister f1 = loc1.AsFpuRegister<FRegister>();
    FRegister f2 = loc2.AsFpuRegister<FRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, f2);
      __ MovS(f2, f1);
      __ MovS(f1, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, f2);
      __ MovD(f2, f1);
      __ MovD(f1, FTMP);
    }
  } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegister())) {
    // Swap FPR and GPR.
    DCHECK_EQ(type, Primitive::kPrimFloat);  // Can only swap a float.
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Mfc1(r2, f1);
    __ Mtc1(TMP, f1);
  } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
    // Swap 2 GPR register pairs.
    Register r1 = loc1.AsRegisterPairLow<Register>();
    Register r2 = loc2.AsRegisterPairLow<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
    r1 = loc1.AsRegisterPairHigh<Register>();
    r2 = loc2.AsRegisterPairHigh<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
    // Swap FPR and GPR register pair.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                          : loc2.AsRegisterPairLow<Register>();
    Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                          : loc2.AsRegisterPairHigh<Register>();
    // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
    // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
    // unpredictable and the following mfch1 will fail.
    __ Mfc1(TMP, f1);
    __ MoveFromFpuHigh(AT, f1);
    __ Mtc1(r2_l, f1);
    __ MoveToFpuHigh(r2_h, f1);
    __ Move(r2_l, TMP);
    __ Move(r2_h, AT);
  } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
    // Swap two 32-bit stack slots.
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
  } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
    // Swap two 64-bit stack slots.
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
  } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
             (loc1.IsStackSlot() && loc2.IsRegister())) {
    // Swap GPR and 32-bit stack slot.
    Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    __ Move(TMP, reg);
    __ LoadFromOffset(kLoadWord, reg, SP, offset);
    __ StoreToOffset(kStoreWord, TMP, SP, offset);
  } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
             (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
    // Swap GPR register pair and 64-bit stack slot, one word at a time.
    Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                           : loc2.AsRegisterPairLow<Register>();
    Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                           : loc2.AsRegisterPairHigh<Register>();
    intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
                                                 : loc2.GetHighStackIndex(kMipsWordSize);
    __ Move(TMP, reg_l);
    __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
    __ Move(TMP, reg_h);
    __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
  } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
    // Swap FPR and stack slot (all FPR/FPR and FPR/GPR cases were
    // handled above, so the other location must be a stack slot here).
    FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                         : loc2.AsFpuRegister<FRegister>();
    intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, reg);
      __ LoadSFromOffset(reg, SP, offset);
      __ StoreSToOffset(FTMP, SP, offset);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, reg);
      __ LoadDFromOffset(reg, SP, offset);
      __ StoreDToOffset(FTMP, SP, offset);
    }
  } else {
    LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
  }
}
1240
1241void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
1242 __ Pop(static_cast<Register>(reg));
1243}
1244
1245void ParallelMoveResolverMIPS::SpillScratch(int reg) {
1246 __ Push(static_cast<Register>(reg));
1247}
1248
1249void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
1250 // Allocate a scratch register other than TMP, if available.
1251 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1252 // automatically unspilled when the scratch scope object is destroyed).
1253 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1254 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
1255 int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
1256 for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
1257 __ LoadFromOffset(kLoadWord,
1258 Register(ensure_scratch.GetRegister()),
1259 SP,
1260 index1 + stack_offset);
1261 __ LoadFromOffset(kLoadWord,
1262 TMP,
1263 SP,
1264 index2 + stack_offset);
1265 __ StoreToOffset(kStoreWord,
1266 Register(ensure_scratch.GetRegister()),
1267 SP,
1268 index2 + stack_offset);
1269 __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
1270 }
1271}
1272
Alexey Frunze73296a72016-06-03 22:51:46 -07001273void CodeGeneratorMIPS::ComputeSpillMask() {
1274 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
1275 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
1276 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
1277 // If there're FPU callee-saved registers and there's an odd number of GPR callee-saved
1278 // registers, include the ZERO register to force alignment of FPU callee-saved registers
1279 // within the stack frame.
1280 if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
1281 core_spill_mask_ |= (1 << ZERO);
1282 }
Alexey Frunze58320ce2016-08-30 21:40:46 -07001283}
1284
1285bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
Alexey Frunze06a46c42016-07-19 15:00:40 -07001286 // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
Alexey Frunze58320ce2016-08-30 21:40:46 -07001287 // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
1288 // into the path that creates a stack frame so that RA can be explicitly saved and restored.
1289 // RA can't otherwise be saved/restored when it's the only spilled register.
Alexey Frunze58320ce2016-08-30 21:40:46 -07001290 return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
Alexey Frunze73296a72016-06-03 22:51:46 -07001291}
1292
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001293static dwarf::Reg DWARFReg(Register reg) {
1294 return dwarf::Reg::MipsCore(static_cast<int>(reg));
1295}
1296
1297// TODO: mapping of floating-point registers to DWARF.
1298
// Emits the method prologue: optional stack-overflow probe, frame
// allocation, callee-save spills (with CFI records), and storing the
// current ArtMethod* and the should-deoptimize flag when required.
void CodeGeneratorMIPS::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe the address just below the reserved stack region; the load
    // faults if the method would overflow the stack. The discarded result
    // goes to ZERO, and a stack map is recorded for the faulting pc.
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    // Nothing to spill and no frame to set up.
    CHECK_EQ(fpu_spill_mask_, 0u);
    CHECK_EQ(core_spill_mask_, 1u << RA);
    CHECK(!clobbered_ra_);
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core callee-saves go at the top of the frame, highest register first.
  for (uint32_t mask = core_spill_mask_; mask != 0; ) {
    Register reg = static_cast<Register>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsWordSize;
    // The ZERO register is only included for alignment.
    if (reg != ZERO) {
      __ StoreToOffset(kStoreWord, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow, each occupying a doubleword.
  for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
    FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsDoublewordSize;
    __ StoreDToOffset(reg, SP, ofs);
    // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1360
// Emits the method epilogue: restores callee-saves (mirroring
// GenerateFrameEntry), tears down the frame and returns via `jr RA`,
// scheduling the frame teardown into the branch delay slot when the
// frame size fits an add-immediate.
void CodeGeneratorMIPS::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (uint32_t mask = core_spill_mask_; mask != 0; ) {
      Register reg = static_cast<Register>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsWordSize;
      // The ZERO register is only included for alignment.
      if (reg != ZERO) {
        __ LoadFromOffset(kLoadWord, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
      FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsDoublewordSize;
      __ LoadDFromOffset(reg, SP, ofs);
      // TODO: __ cfi().Restore(DWARFReg(reg));
    }

    size_t frame_size = GetFrameSize();
    // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
    bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
    // Disable assembler reordering so the delay slot is filled exactly as
    // written below.
    bool reordering = __ SetReorder(false);
    if (exchange) {
      __ Jr(RA);
      __ DecreaseFrameSize(frame_size);  // Single instruction in delay slot.
    } else {
      __ DecreaseFrameSize(frame_size);
      __ Jr(RA);
      __ Nop();  // In delay slot.
    }
    __ SetReorder(reordering);
  } else {
    // No frame: just return.
    __ Jr(RA);
    __ NopIfNoReordering();
  }

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1409
1410void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1411 __ Bind(GetLabelOf(block));
1412}
1413
1414void CodeGeneratorMIPS::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
1415 if (src.Equals(dst)) {
1416 return;
1417 }
1418
1419 if (src.IsConstant()) {
1420 MoveConstant(dst, src.GetConstant());
1421 } else {
1422 if (Primitive::Is64BitType(dst_type)) {
1423 Move64(dst, src);
1424 } else {
1425 Move32(dst, src);
1426 }
1427 }
1428}
1429
// Emit a 32-bit move between any combination of core register, FPU register
// and stack slot. Stack-to-stack moves go through the TMP scratch register.
void CodeGeneratorMIPS::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }

  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      // FPU -> core transfer.
      __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
    } else {
      DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      // Core -> FPU transfer.
      __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
    } else if (source.IsFpuRegister()) {
      __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
    } else {
      DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
      __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
      // Stack-to-stack: bounce the word through TMP.
      __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
    }
  }
}
1466
// Emit a 64-bit move between register pairs, double-precision FPU registers
// and double stack slots. Stack-to-stack moves go word-by-word through TMP.
void CodeGeneratorMIPS::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }

  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
      __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
    } else if (source.IsFpuRegister()) {
      Register dst_high = destination.AsRegisterPairHigh<Register>();
      Register dst_low = destination.AsRegisterPairLow<Register>();
      FRegister src = source.AsFpuRegister<FRegister>();
      __ Mfc1(dst_low, src);
      // MoveFromFpuHigh abstracts over the 32-/64-bit FPU register models.
      __ MoveFromFpuHigh(dst_high, src);
    } else {
      DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
      int32_t off = source.GetStackIndex();
      Register r = destination.AsRegisterPairLow<Register>();
      // Loads all 64 bits; the assembler resolves the pair from the low reg.
      __ LoadFromOffset(kLoadDoubleword, r, SP, off);
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegisterPair()) {
      FRegister dst = destination.AsFpuRegister<FRegister>();
      Register src_high = source.AsRegisterPairHigh<Register>();
      Register src_low = source.AsRegisterPairLow<Register>();
      __ Mtc1(src_low, dst);
      __ MoveToFpuHigh(src_high, dst);
    } else if (source.IsFpuRegister()) {
      __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
    } else {
      DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
      __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot()) << destination;
    int32_t off = destination.GetStackIndex();
    if (source.IsRegisterPair()) {
      __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, off);
    } else if (source.IsFpuRegister()) {
      __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, off);
    } else {
      DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
      // Stack-to-stack: copy the two words separately through TMP.
      __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, TMP, SP, off);
      __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
      __ StoreToOffset(kStoreWord, TMP, SP, off + 4);
    }
  }
}
1517
// Materialize the constant `c` into `destination` (register, register pair,
// FPU register or stack slot). TMP is used as a scratch register for stores
// to memory and for FPU constant loads.
void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
  if (c->IsIntConstant() || c->IsNullConstant()) {
    // Move 32 bit constant.
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsRegister()) {
      Register dst = destination.AsRegister<Register>();
      __ LoadConst32(dst, value);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsLongConstant()) {
    // Move 64 bit constant.
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsRegisterPair()) {
      Register r_h = destination.AsRegisterPairHigh<Register>();
      Register r_l = destination.AsRegisterPairLow<Register>();
      __ LoadConst64(r_h, r_l, value);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  } else if (c->IsFloatConstant()) {
    // Move 32 bit float constant (handled via its bit pattern).
    int32_t value = GetInt32ValueOf(c);
    if (destination.IsFpuRegister()) {
      __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
    } else {
      DCHECK(destination.IsStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
    }
  } else {
    // Move 64 bit double constant (handled via its bit pattern).
    DCHECK(c->IsDoubleConstant()) << c->DebugName();
    int64_t value = GetInt64ValueOf(c);
    if (destination.IsFpuRegister()) {
      FRegister fd = destination.AsFpuRegister<FRegister>();
      __ LoadDConst64(fd, value, TMP);
    } else {
      DCHECK(destination.IsDoubleStackSlot())
          << "Cannot move " << c->DebugName() << " to " << destination;
      __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
    }
  }
}
1566
1567void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1568 DCHECK(destination.IsRegister());
1569 Register dst = destination.AsRegister<Register>();
1570 __ LoadConst32(dst, value);
1571}
1572
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001573void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
1574 if (location.IsRegister()) {
1575 locations->AddTemp(location);
Alexey Frunzec9e94f32015-10-26 16:11:39 -07001576 } else if (location.IsRegisterPair()) {
1577 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1578 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001579 } else {
1580 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1581 }
1582}
1583
// Convert all collected PcRelativePatchInfo records of one kind into
// LinkerPatch entries, using `Factory` to build the concrete patch type.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    // The high-half instruction must have been emitted by now.
    DCHECK(info.high_label.IsBound());
    uint32_t high_offset = __ GetLabelLocation(&info.high_label);
    // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
    // the assembler's base label used for PC-relative addressing.
    uint32_t pc_rel_offset = info.pc_rel_label.IsBound()
        ? __ GetLabelLocation(&info.pc_rel_label)
        : __ GetPcRelBaseLabelLocation();
    linker_patches->push_back(Factory(high_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1601
// Gather every linker patch recorded during code generation into
// `linker_patches`. The patch kinds emitted depend on whether we are
// compiling the boot image or an app image.
void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      pc_relative_method_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size();
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  if (GetCompilerOptions().IsBootImage()) {
    // Boot image: methods/types/strings are referenced relative to the image.
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
                                                                  linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    // Non-boot-image compile: method/type boot-image patch lists must be
    // empty, and string references resolve through .bss entries.
    DCHECK(pc_relative_method_patches_.empty());
    DCHECK(pc_relative_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  // Verify the reserve() above accounted for every emitted patch.
  DCHECK_EQ(size, linker_patches->size());
}
1630
Vladimir Marko65979462017-05-19 17:25:12 +01001631CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPatch(
1632 MethodReference target_method) {
1633 return NewPcRelativePatch(*target_method.dex_file,
1634 target_method.dex_method_index,
1635 &pc_relative_method_patches_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001636}
1637
1638CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
Andreas Gampea5b09a62016-11-17 15:21:22 -08001639 const DexFile& dex_file, dex::TypeIndex type_index) {
1640 return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001641}
1642
Vladimir Marko1998cd02017-01-13 13:02:58 +00001643CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
1644 const DexFile& dex_file, dex::TypeIndex type_index) {
1645 return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
1646}
1647
Vladimir Marko65979462017-05-19 17:25:12 +01001648CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
1649 const DexFile& dex_file, dex::StringIndex string_index) {
1650 return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
1651}
1652
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001653CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeDexCacheArrayPatch(
1654 const DexFile& dex_file, uint32_t element_offset) {
1655 return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
1656}
1657
1658CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
1659 const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
1660 patches->emplace_back(dex_file, offset_or_index);
1661 return &patches->back();
1662}
1663
Alexey Frunze06a46c42016-07-19 15:00:40 -07001664Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1665 return map->GetOrCreate(
1666 value,
1667 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1668}
1669
Alexey Frunze06a46c42016-07-19 15:00:40 -07001670Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001671 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001672}
1673
// Emit the "high half" of a PC-relative address computation into `out`; the
// 16-bit immediates (placeholders here) are filled in later by the linker.
// On R6 this is a single AUIPC; on R2 it is LUI plus an ADDU with either RA
// (obtained via NAL) or a precomputed base register.
void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info,
                                                             Register out,
                                                             Register base) {
  if (GetInstructionSetFeatures().IsR6()) {
    // R6 needs no explicit base register: AUIPC is inherently PC-relative.
    DCHECK_EQ(base, ZERO);
    __ Bind(&info->high_label);
    __ Bind(&info->pc_rel_label);
    // Add the high half of a 32-bit offset to PC.
    __ Auipc(out, /* placeholder */ 0x1234);
  } else {
    // If base is ZERO, emit NAL to obtain the actual base.
    if (base == ZERO) {
      // Generate a dummy PC-relative call to obtain PC.
      __ Nal();
    }
    __ Bind(&info->high_label);
    __ Lui(out, /* placeholder */ 0x1234);
    // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
    // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
    if (base == ZERO) {
      __ Bind(&info->pc_rel_label);
    }
    // Add the high half of a 32-bit offset to PC.
    __ Addu(out, out, (base == ZERO) ? RA : base);
  }
  // The immediately following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. lw, jialc, addiu).
}
1702
Alexey Frunze627c1a02017-01-30 19:28:14 -08001703CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
1704 const DexFile& dex_file,
1705 dex::StringIndex dex_index,
1706 Handle<mirror::String> handle) {
1707 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
1708 reinterpret_cast64<uint64_t>(handle.GetReference()));
1709 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
1710 return &jit_string_patches_.back();
1711}
1712
1713CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
1714 const DexFile& dex_file,
1715 dex::TypeIndex dex_index,
1716 Handle<mirror::Class> handle) {
1717 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
1718 reinterpret_cast64<uint64_t>(handle.GetReference()));
1719 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
1720 return &jit_class_patches_.back();
1721}
1722
// Patch one JIT GC-root use at `info.high_label` so it produces the address
// of slot `index_in_table` in the root table at `roots_data`. The code being
// patched is a little-endian LUI (imm placeholder 0x1234) followed by an
// instruction with a 16-bit immediate (placeholder 0x5678).
void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
                                        const uint8_t* roots_data,
                                        const CodeGeneratorMIPS::JitPatchInfo& info,
                                        uint64_t index_in_table) const {
  uint32_t literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
  // Verify the placeholders before overwriting them.
  // lui reg, addr32_high
  DCHECK_EQ(code[literal_offset + 0], 0x34);
  DCHECK_EQ(code[literal_offset + 1], 0x12);
  DCHECK_EQ((code[literal_offset + 2] & 0xE0), 0x00);
  DCHECK_EQ(code[literal_offset + 3], 0x3C);
  // instr reg, reg, addr32_low
  DCHECK_EQ(code[literal_offset + 4], 0x78);
  DCHECK_EQ(code[literal_offset + 5], 0x56);
  addr32 += (addr32 & 0x8000) << 1;  // Account for sign extension in "instr reg, reg, addr32_low".
  // lui reg, addr32_high
  code[literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
  code[literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
  // instr reg, reg, addr32_low
  code[literal_offset + 4] = static_cast<uint8_t>(addr32 >> 0);
  code[literal_offset + 5] = static_cast<uint8_t>(addr32 >> 8);
}
1747
1748void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1749 for (const JitPatchInfo& info : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001750 const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
1751 dex::StringIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001752 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001753 uint64_t index_in_table = it->second;
1754 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001755 }
1756 for (const JitPatchInfo& info : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001757 const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
1758 dex::TypeIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001759 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001760 uint64_t index_in_table = it->second;
1761 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001762 }
1763}
1764
// Mark the card covering `object` dirty after `value` was stored into it, so
// the GC rescans the object. Clobbers AT and TMP. When `value_can_be_null`,
// the marking is skipped entirely for a null `value`.
void CodeGeneratorMIPS::MarkGCCard(Register object,
                                   Register value,
                                   bool value_can_be_null) {
  MipsLabel done;
  Register card = AT;
  Register temp = TMP;
  if (value_can_be_null) {
    __ Beqz(value, &done);
  }
  // Load the card table "biased begin" pointer from the current thread.
  __ LoadFromOffset(kLoadWord,
                    card,
                    TR,
                    Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
  // Address of the object's card entry: card + (object >> kCardShift).
  __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Addu(temp, card, temp);
  // Store the low byte of `card` to dirty the entry.
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1785
David Brazdil58282f42016-01-14 12:45:10 +00001786void CodeGeneratorMIPS::SetupBlockedRegisters() const {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001787 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1788 blocked_core_registers_[ZERO] = true;
1789 blocked_core_registers_[K0] = true;
1790 blocked_core_registers_[K1] = true;
1791 blocked_core_registers_[GP] = true;
1792 blocked_core_registers_[SP] = true;
1793 blocked_core_registers_[RA] = true;
1794
1795 // AT and TMP(T8) are used as temporary/scratch registers
1796 // (similar to how AT is used by MIPS assemblers).
1797 blocked_core_registers_[AT] = true;
1798 blocked_core_registers_[TMP] = true;
1799 blocked_fpu_registers_[FTMP] = true;
1800
1801 // Reserve suspend and thread registers.
1802 blocked_core_registers_[S0] = true;
1803 blocked_core_registers_[TR] = true;
1804
1805 // Reserve T9 for function calls
1806 blocked_core_registers_[T9] = true;
1807
1808 // Reserve odd-numbered FPU registers.
1809 for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
1810 blocked_fpu_registers_[i] = true;
1811 }
1812
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02001813 if (GetGraph()->IsDebuggable()) {
1814 // Stubs do not save callee-save floating point registers. If the graph
1815 // is debuggable, we need to deal with these registers differently. For
1816 // now, just block them.
1817 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1818 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1819 }
1820 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001821}
1822
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001823size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1824 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1825 return kMipsWordSize;
1826}
1827
1828size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1829 __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
1830 return kMipsWordSize;
1831}
1832
1833size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1834 __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
1835 return kMipsDoublewordSize;
1836}
1837
1838size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1839 __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
1840 return kMipsDoublewordSize;
1841}
1842
1843void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001844 stream << Register(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001845}
1846
1847void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001848 stream << FRegister(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001849}
1850
// Bytes of outgoing-argument space ($a0-$a3 home area) reserved around calls
// to "direct" entrypoints; see GenerateInvokeRuntime().
constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1852
// Call a quick runtime entrypoint and, when the entrypoint requires it,
// record a stack map at `dex_pc`.
void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                      HInstruction* instruction,
                                      uint32_t dex_pc,
                                      SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // The entrypoint is reached through its slot in the Thread object.
  GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
                        IsDirectEntrypoint(entrypoint));
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1864
// Like InvokeRuntime(), but for calls that must not record a stack map; the
// validation helper checks that this is legitimate for `instruction`.
void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                            HInstruction* instruction,
                                                            SlowPathCode* slow_path,
                                                            bool direct) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset, direct);
}
1872
// Emit the actual entrypoint call: load the target address from the current
// thread into T9 (the standard MIPS call register) and JALR it, managing the
// branch delay slot manually.
void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
  bool reordering = __ SetReorder(false);
  __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
  __ Jalr(T9);
  if (direct) {
    // Reserve argument space on stack (for $a0-$a3) for
    // entrypoints that directly reference native implementations.
    // Called function may use this space to store $a0-$a3 regs.
    __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);  // Single instruction in delay slot.
    __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
  } else {
    __ Nop();  // In delay slot.
  }
  __ SetReorder(reordering);
}
1888
// Branch to `slow_path` unless the class in `class_reg` has reached the
// "initialized" status. Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
                                                                    Register class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  // Any status below "initialized" goes to the slow path.
  __ Blt(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1898
// Emit a memory barrier; all barrier kinds are conservatively implemented
// with a full SYNC.
void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}
1902
// Poll the thread's flags and enter the suspend-check slow path if any flag
// is set. When `successor` is non-null the check is merged with the branch
// to that block; otherwise execution resumes right after the check.
void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                        HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnez(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqz(TMP, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1922
// Cache the assembler and code generator used by all the visit methods.
InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
                                                           CodeGeneratorMIPS* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
1928
// Set up register/constant constraints for Add/Sub/And/Or/Xor.
void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type type = instruction->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          // ANDI/ORI/XORI zero-extend their 16-bit immediate.
          can_use_imm = IsUint<16>(imm);
        } else if (instruction->IsAdd()) {
          // ADDIU sign-extends its 16-bit immediate.
          can_use_imm = IsInt<16>(imm);
        } else {
          DCHECK(instruction->IsSub());
          // Subtraction of a constant is emitted as ADDIU of its negation.
          can_use_imm = IsInt<16>(-imm);
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // Only Add/Sub exist for FP types among the ops routed here.
      DCHECK(instruction->IsAdd() || instruction->IsSub());
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}
1976
// Emits MIPS32 code for the arithmetic/logic binary operations And, Or, Xor,
// Add and Sub, for int, long, float and double operands. Register/location
// choices were made by the corresponding LocationsBuilderMIPS::HandleBinaryOp.
// Scratch registers TMP and AT may be clobbered in the kPrimLong paths.
void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Location rhs_location = locations->InAt(1);

      // The right-hand side is either a register or an inline constant.
      Register rhs_reg = ZERO;
      int32_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<Register>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (use_imm)
          __ Addiu(dst, lhs, rhs_imm);
        else
          __ Addu(dst, lhs, rhs_reg);
      } else {
        DCHECK(instruction->IsSub());
        // Subtraction of a constant is emitted as addition of its negation.
        // NOTE(review): assumes the locations builder only allows immediates
        // whose negation fits the Addiu encoding — confirm against the
        // (not visible here) LocationsBuilderMIPS::HandleBinaryOp.
        if (use_imm)
          __ Addiu(dst, lhs, -rhs_imm);
        else
          __ Subu(dst, lhs, rhs_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit values live in register pairs; operate on the two 32-bit
      // halves, propagating a carry/borrow through TMP (or AT) for Add/Sub.
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      if (!use_imm) {
        Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
        Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
        if (instruction->IsAnd()) {
          __ And(dst_low, lhs_low, rhs_low);
          __ And(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsOr()) {
          __ Or(dst_low, lhs_low, rhs_low);
          __ Or(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsXor()) {
          __ Xor(dst_low, lhs_low, rhs_low);
          __ Xor(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsAdd()) {
          if (lhs_low == rhs_low) {
            // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
            // x + x carries out iff x is negative, so the carry can be read
            // from the sign bit before the addition clobbers the input.
            __ Slt(TMP, lhs_low, ZERO);
            __ Addu(dst_low, lhs_low, rhs_low);
          } else {
            __ Addu(dst_low, lhs_low, rhs_low);
            // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
            // Unsigned sum < operand  <=>  the addition wrapped, i.e. carry out.
            __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
          }
          __ Addu(dst_high, lhs_high, rhs_high);
          __ Addu(dst_high, dst_high, TMP);  // Fold the carry into the high word.
        } else {
          DCHECK(instruction->IsSub());
          // Borrow out of the low word iff lhs_low < rhs_low (unsigned).
          __ Sltu(TMP, lhs_low, rhs_low);
          __ Subu(dst_low, lhs_low, rhs_low);
          __ Subu(dst_high, lhs_high, rhs_high);
          __ Subu(dst_high, dst_high, TMP);  // Subtract the borrow from the high word.
        }
      } else {
        // Constant right-hand side: fold each 32-bit half separately,
        // skipping no-op halves and reusing TMP when both halves need the
        // same materialized 32-bit constant.
        int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
        if (instruction->IsOr()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            // OR with 0 into the same register is a no-op; skip it.
            if (dst_low != lhs_low || low != 0) {
              __ Ori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Or(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Ori(dst_high, lhs_high, high);
            }
          } else {
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            // If high == low, TMP still holds the value from the low half
            // (high not being Uint16 implies low was not either).
            __ Or(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsXor()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            // XOR with 0 into the same register is a no-op; skip it.
            if (dst_low != lhs_low || low != 0) {
              __ Xori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Xor(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Xori(dst_high, lhs_high, high);
            }
          } else {
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Xor(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsAnd()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            __ Andi(dst_low, lhs_low, low);
          } else if (low != 0xFFFFFFFF) {
            __ LoadConst32(TMP, low);
            __ And(dst_low, lhs_low, TMP);
          } else if (dst_low != lhs_low) {
            // AND with all-ones is the identity; a move suffices.
            __ Move(dst_low, lhs_low);
          }
          if (IsUint<16>(high)) {
            __ Andi(dst_high, lhs_high, high);
          } else if (high != 0xFFFFFFFF) {
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ And(dst_high, lhs_high, TMP);
          } else if (dst_high != lhs_high) {
            __ Move(dst_high, lhs_high);
          }
        } else {
          if (instruction->IsSub()) {
            // Turn subtraction of a constant into addition of its negation.
            value = -value;
          } else {
            DCHECK(instruction->IsAdd());
          }
          int32_t low = Low32Bits(value);
          int32_t high = High32Bits(value);
          if (IsInt<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Addiu(dst_low, lhs_low, low);
            }
            if (low != 0) {
              // Carry out of the low word: unsigned(dst_low) < unsigned(low).
              __ Sltiu(AT, dst_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Addu(dst_low, lhs_low, TMP);
            __ Sltu(AT, dst_low, TMP);
          }
          if (IsInt<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Addiu(dst_high, lhs_high, high);
            }
          } else {
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Addu(dst_high, lhs_high, TMP);
          }
          if (low != 0) {
            // Fold the carry computed in AT into the high word.
            __ Addu(dst_high, dst_high, AT);
          }
        }
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // FPU operands; only Add and Sub are expected here for FP types.
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat) {
          __ AddS(dst, lhs, rhs);
        } else {
          __ AddD(dst, lhs, rhs);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimFloat) {
          __ SubS(dst, lhs, rhs);
        } else {
          __ SubD(dst, lhs, rhs);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2193
2194void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002195 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002196
2197 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2198 Primitive::Type type = instr->GetResultType();
2199 switch (type) {
2200 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002201 locations->SetInAt(0, Location::RequiresRegister());
2202 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2203 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2204 break;
2205 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002206 locations->SetInAt(0, Location::RequiresRegister());
2207 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2208 locations->SetOut(Location::RequiresRegister());
2209 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002210 default:
2211 LOG(FATAL) << "Unexpected shift type " << type;
2212 }
2213}
2214
// Number of bits in a MIPS machine word (kMipsWordSize bytes * kBitsPerByte),
// used below as the boundary between the low/high halves of a long.
static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2216
// Emits MIPS32 code for Shl/Shr/UShr/Ror on int and long operands, for both
// constant and variable shift amounts. Shift distances are masked to the
// type width (31 for int, 63 for long). Uses ROTR/ROTRV/INS when the ISA is
// MIPS32R2 or higher, otherwise falls back to shift/or sequences.
// Clobbers the scratch registers TMP and AT in several paths.
void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  Location rhs_location = locations->InAt(1);
  bool use_imm = rhs_location.IsConstant();
  Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
  int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
  const uint32_t shift_mask =
      (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
  const uint32_t shift_value = rhs_imm & shift_mask;
  // Are the INS (Insert Bit Field) and ROTR instructions supported?
  bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: at most a register move.
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (instr->IsShl()) {
          __ Sll(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Sra(dst, lhs, shift_value);
        } else if (instr->IsUShr()) {
          __ Srl(dst, lhs, shift_value);
        } else {
          // Rotate right.
          if (has_ins_rotr) {
            __ Rotr(dst, lhs, shift_value);
          } else {
            // Pre-R2: rotr(x, n) == (x << (32 - n)) | (x >>> n).
            __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
            __ Srl(dst, lhs, shift_value);
            __ Or(dst, dst, TMP);
          }
        }
      } else {
        // Variable shift amount in rhs_reg; hardware masks it to 5 bits.
        if (instr->IsShl()) {
          __ Sllv(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Srav(dst, lhs, rhs_reg);
        } else if (instr->IsUShr()) {
          __ Srlv(dst, lhs, rhs_reg);
        } else {
          if (has_ins_rotr) {
            __ Rotrv(dst, lhs, rhs_reg);
          } else {
            __ Subu(TMP, ZERO, rhs_reg);
            // 32-bit shift instructions use the 5 least significant bits of the shift count, so
            // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
            // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
            // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
            // IOW, the OR'd values are equal.
            __ Sllv(TMP, lhs, TMP);
            __ Srlv(dst, lhs, rhs_reg);
            __ Or(dst, dst, TMP);
          }
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit shifts operate on a register pair.
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          codegen_->Move64(locations->Out(), locations->InAt(0));
        } else if (shift_value < kMipsBitsPerWord) {
          // Shift distance in [1, 31]: bits cross between the two words.
          if (has_ins_rotr) {
            // R2+: use INS to merge the bits carried across the word boundary.
            if (instr->IsShl()) {
              __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
              __ Sll(dst_low, lhs_low, shift_value);
            } else if (instr->IsShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Sra(dst_high, lhs_high, shift_value);
            } else if (instr->IsUShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
            } else {
              // Rotate right: like UShr, but the high word also receives the
              // bits rotated out of the low word.
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
              __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
            }
          } else {
            // Pre-R2: carry the crossing bits through TMP and OR them in.
            if (instr->IsShl()) {
              __ Sll(dst_low, lhs_low, shift_value);
              __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
              __ Sll(dst_high, lhs_high, shift_value);
              __ Or(dst_high, dst_high, TMP);
            } else if (instr->IsShr()) {
              __ Sra(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else if (instr->IsUShr()) {
              __ Srl(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else {
              __ Srl(TMP, lhs_low, shift_value);
              __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
              __ Or(dst_low, dst_low, TMP);
              __ Srl(TMP, lhs_high, shift_value);
              __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Or(dst_high, dst_high, TMP);
            }
          }
        } else {
          // Shift distance in [32, 63]: whole words move; shift the
          // remaining distance within a single word.
          const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
          if (instr->IsShl()) {
            __ Sll(dst_high, lhs_low, shift_value_high);
            __ Move(dst_low, ZERO);
          } else if (instr->IsShr()) {
            __ Sra(dst_low, lhs_high, shift_value_high);
            // High word becomes pure sign extension.
            __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
          } else if (instr->IsUShr()) {
            __ Srl(dst_low, lhs_high, shift_value_high);
            __ Move(dst_high, ZERO);
          } else {
            if (shift_value == kMipsBitsPerWord) {
              // 64-bit rotation by 32 is just a swap.
              __ Move(dst_low, lhs_high);
              __ Move(dst_high, lhs_low);
            } else {
              if (has_ins_rotr) {
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
              } else {
                __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Or(dst_low, dst_low, TMP);
                __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Or(dst_high, dst_high, TMP);
              }
            }
          }
        }
      } else {
        // Variable 64-bit shift: compute the result assuming the distance is
        // < 32 (bits crossing words are recovered via a complementary shift
        // through AT/TMP), then if bit 5 of the count is set (distance >= 32)
        // fix up by moving/zeroing/sign-extending whole words.
        MipsLabel done;
        if (instr->IsShl()) {
          __ Sllv(dst_low, lhs_low, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);  // AT = ~rhs_reg = 31 - (rhs_reg & 31), mod 32.
          __ Srl(TMP, lhs_low, 1);
          __ Srlv(TMP, TMP, AT);      // TMP = lhs_low >>> (32 - count).
          __ Sllv(dst_high, lhs_high, rhs_reg);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);  // Test bit 5 of the count.
          __ Beqz(TMP, &done);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, ZERO);
        } else if (instr->IsShr()) {
          __ Srav(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);      // TMP = lhs_high << (32 - count).
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Sra(dst_high, dst_high, 31);  // Sign-extend the high word.
        } else if (instr->IsUShr()) {
          __ Srlv(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Move(dst_high, ZERO);
        } else {
          // Rotate right: both words receive bits from the other; the
          // >= 32 fixup is a swap of the two result words via TMP.
          __ Nor(AT, ZERO, rhs_reg);
          __ Srlv(TMP, lhs_low, rhs_reg);
          __ Sll(dst_low, lhs_high, 1);
          __ Sllv(dst_low, dst_low, AT);
          __ Or(dst_low, dst_low, TMP);
          __ Srlv(TMP, lhs_high, rhs_reg);
          __ Sll(dst_high, lhs_low, 1);
          __ Sllv(dst_high, dst_high, AT);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(TMP, dst_high);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, TMP);
        }
        __ Bind(&done);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2428
// HAdd shares its location setup with the other binary arithmetic/logic ops.
void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2432
// HAdd shares its code generation with the other binary arithmetic/logic ops.
void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2436
// HAnd shares its location setup with the other binary arithmetic/logic ops.
void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2440
// HAnd shares its code generation with the other binary arithmetic/logic ops.
void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2444
// Sets up locations for an array element load. Object-array loads under a
// read barrier may call a slow path and need extra constraints (overlapping
// output, a temp for Baker read barriers).
void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier
                                                       ? LocationSummary::kCallOnSlowPath
                                                       : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // In0: array reference; In1: index (register or constant).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
2476
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002477static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2478 auto null_checker = [codegen, instruction]() {
2479 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002480 };
2481 return null_checker;
2482}
2483
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002484void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
2485 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002486 Location obj_loc = locations->InAt(0);
2487 Register obj = obj_loc.AsRegister<Register>();
2488 Location out_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002489 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002490 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002491 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002492
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002493 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002494 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2495 instruction->IsStringCharAt();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002496 switch (type) {
2497 case Primitive::kPrimBoolean: {
Alexey Frunze15958152017-02-09 19:08:30 -08002498 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002499 if (index.IsConstant()) {
2500 size_t offset =
2501 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002502 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002503 } else {
2504 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002505 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002506 }
2507 break;
2508 }
2509
2510 case Primitive::kPrimByte: {
Alexey Frunze15958152017-02-09 19:08:30 -08002511 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002512 if (index.IsConstant()) {
2513 size_t offset =
2514 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002515 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002516 } else {
2517 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002518 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002519 }
2520 break;
2521 }
2522
2523 case Primitive::kPrimShort: {
Alexey Frunze15958152017-02-09 19:08:30 -08002524 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002525 if (index.IsConstant()) {
2526 size_t offset =
2527 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002528 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002529 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002530 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002531 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002532 }
2533 break;
2534 }
2535
2536 case Primitive::kPrimChar: {
Alexey Frunze15958152017-02-09 19:08:30 -08002537 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002538 if (maybe_compressed_char_at) {
2539 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2540 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
2541 __ Sll(TMP, TMP, 31); // Extract compression flag into the most significant bit of TMP.
2542 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2543 "Expecting 0=compressed, 1=uncompressed");
2544 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002545 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002546 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2547 if (maybe_compressed_char_at) {
2548 MipsLabel uncompressed_load, done;
2549 __ Bnez(TMP, &uncompressed_load);
2550 __ LoadFromOffset(kLoadUnsignedByte,
2551 out,
2552 obj,
2553 data_offset + (const_index << TIMES_1));
2554 __ B(&done);
2555 __ Bind(&uncompressed_load);
2556 __ LoadFromOffset(kLoadUnsignedHalfword,
2557 out,
2558 obj,
2559 data_offset + (const_index << TIMES_2));
2560 __ Bind(&done);
2561 } else {
2562 __ LoadFromOffset(kLoadUnsignedHalfword,
2563 out,
2564 obj,
2565 data_offset + (const_index << TIMES_2),
2566 null_checker);
2567 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002568 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002569 Register index_reg = index.AsRegister<Register>();
2570 if (maybe_compressed_char_at) {
2571 MipsLabel uncompressed_load, done;
2572 __ Bnez(TMP, &uncompressed_load);
2573 __ Addu(TMP, obj, index_reg);
2574 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2575 __ B(&done);
2576 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002577 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002578 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2579 __ Bind(&done);
2580 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002581 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002582 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
2583 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002584 }
2585 break;
2586 }
2587
Alexey Frunze15958152017-02-09 19:08:30 -08002588 case Primitive::kPrimInt: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002589 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002590 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002591 if (index.IsConstant()) {
2592 size_t offset =
2593 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002594 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002595 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002596 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002597 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002598 }
2599 break;
2600 }
2601
Alexey Frunze15958152017-02-09 19:08:30 -08002602 case Primitive::kPrimNot: {
2603 static_assert(
2604 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2605 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2606 // /* HeapReference<Object> */ out =
2607 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2608 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2609 Location temp = locations->GetTemp(0);
2610 // Note that a potential implicit null check is handled in this
2611 // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
2612 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2613 out_loc,
2614 obj,
2615 data_offset,
2616 index,
2617 temp,
2618 /* needs_null_check */ true);
2619 } else {
2620 Register out = out_loc.AsRegister<Register>();
2621 if (index.IsConstant()) {
2622 size_t offset =
2623 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2624 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
2625 // If read barriers are enabled, emit read barriers other than
2626 // Baker's using a slow path (and also unpoison the loaded
2627 // reference, if heap poisoning is enabled).
2628 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2629 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002630 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08002631 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
2632 // If read barriers are enabled, emit read barriers other than
2633 // Baker's using a slow path (and also unpoison the loaded
2634 // reference, if heap poisoning is enabled).
2635 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2636 out_loc,
2637 out_loc,
2638 obj_loc,
2639 data_offset,
2640 index);
2641 }
2642 }
2643 break;
2644 }
2645
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002646 case Primitive::kPrimLong: {
Alexey Frunze15958152017-02-09 19:08:30 -08002647 Register out = out_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002648 if (index.IsConstant()) {
2649 size_t offset =
2650 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002651 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002652 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002653 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002654 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002655 }
2656 break;
2657 }
2658
2659 case Primitive::kPrimFloat: {
Alexey Frunze15958152017-02-09 19:08:30 -08002660 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002661 if (index.IsConstant()) {
2662 size_t offset =
2663 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002664 __ LoadSFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002665 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002666 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002667 __ LoadSFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002668 }
2669 break;
2670 }
2671
2672 case Primitive::kPrimDouble: {
Alexey Frunze15958152017-02-09 19:08:30 -08002673 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002674 if (index.IsConstant()) {
2675 size_t offset =
2676 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002677 __ LoadDFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002678 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002679 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002680 __ LoadDFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002681 }
2682 break;
2683 }
2684
2685 case Primitive::kPrimVoid:
2686 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2687 UNREACHABLE();
2688 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002689}
2690
// Array length is a simple field load: array ref in, length out.
void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2696
// Loads the length field of an array (or String). The implicit null check is
// recorded immediately after the load, which must be the first access to obj.
void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
2709
Alexey Frunzef58b2482016-09-02 22:14:06 -07002710Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2711 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2712 ? Location::ConstantLocation(instruction->AsConstant())
2713 : Location::RequiresRegister();
2714}
2715
2716Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2717 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2718 // We can store a non-zero float or double constant without first loading it into the FPU,
2719 // but we should only prefer this if the constant has a single use.
2720 if (instruction->IsConstant() &&
2721 (instruction->AsConstant()->IsZeroBitPattern() ||
2722 instruction->GetUses().HasExactlyOneElement())) {
2723 return Location::ConstantLocation(instruction->AsConstant());
2724 // Otherwise fall through and require an FPU register for the constant.
2725 }
2726 return Location::RequiresFpuRegister();
2727}
2728
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002729void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002730 Primitive::Type value_type = instruction->GetComponentType();
2731
2732 bool needs_write_barrier =
2733 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2734 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2735
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002736 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2737 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002738 may_need_runtime_call_for_type_check ?
2739 LocationSummary::kCallOnSlowPath :
2740 LocationSummary::kNoCall);
2741
2742 locations->SetInAt(0, Location::RequiresRegister());
2743 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2744 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2745 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002746 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002747 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2748 }
2749 if (needs_write_barrier) {
2750 // Temporary register for the write barrier.
2751 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002752 }
2753}
2754
// Emits the store for an array element assignment. For primitive component
// types this is a single (possibly implicit-null-checking) store; for object
// references it additionally performs the array store type check (with a slow
// path for the uncertain cases), optional heap-reference poisoning, and the
// GC card marking that acts as the write barrier.
void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the element address is obj + folded offset; with a
  // register index, TMP holds the computed base (obj + scaled index).
  Register base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        // Byte elements: scale factor is 1, so the index adds in directly.
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Addu(base_reg, obj, index.AsRegister<Register>());
      }
      if (value_location.IsConstant()) {
        // Store the constant directly; TMP is scratch for materializing it.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // base_reg = obj + (index << 1).
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (value_location.IsConstant()) {
        // Just setting null.
        // (RegisterOrZeroConstant only allows zero-bit-pattern constants here,
        // so no type check or write barrier is needed — DCHECKed below.)
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Register value = value_location.AsRegister<Register>();
      Register temp1 = locations->GetTemp(0).AsRegister<Register>();
      Register temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      MipsLabel done;
      SlowPathCodeMIPS* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null never needs a type check: store it and skip ahead.
          MipsLabel non_zero;
          __ Bnez(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Fast path: exact component-type match allows the store; otherwise
          // accept Object[] (component's super class is null) before bailing
          // to the slow path.
          MipsLabel do_put;
          __ Beq(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnez(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bne(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      Register source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      // No null_checker here: when a type check was emitted, earlier loads of
      // obj's class already performed the implicit null check.
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // Write barrier: mark the GC card for the stored reference.
      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        // 64-bit store addressed via the low register of the pair.
        Register value = value_location.AsRegisterPairLow<Register>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        // FP constant stored via a core register (its raw bit pattern).
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreSToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreDToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2992
2993void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002994 RegisterSet caller_saves = RegisterSet::Empty();
2995 InvokeRuntimeCallingConvention calling_convention;
2996 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2997 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2998 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002999 locations->SetInAt(0, Location::RequiresRegister());
3000 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003001}
3002
3003void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
3004 LocationSummary* locations = instruction->GetLocations();
3005 BoundsCheckSlowPathMIPS* slow_path =
3006 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
3007 codegen_->AddSlowPath(slow_path);
3008
3009 Register index = locations->InAt(0).AsRegister<Register>();
3010 Register length = locations->InAt(1).AsRegister<Register>();
3011
3012 // length is limited by the maximum positive signed 32-bit integer.
3013 // Unsigned comparison of length and index checks for index < 0
3014 // and for length <= index simultaneously.
3015 __ Bgeu(index, length, slow_path->GetEntryLabel());
3016}
3017
Alexey Frunze15958152017-02-09 19:08:30 -08003018// Temp is used for read barrier.
3019static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3020 if (kEmitCompilerReadBarrier &&
3021 (kUseBakerReadBarrier ||
3022 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3023 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3024 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3025 return 1;
3026 }
3027 return 0;
3028}
3029
3030// Extra temp is used for read barrier.
3031static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3032 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3033}
3034
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003035void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003036 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3037 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3038
3039 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3040 switch (type_check_kind) {
3041 case TypeCheckKind::kExactCheck:
3042 case TypeCheckKind::kAbstractClassCheck:
3043 case TypeCheckKind::kClassHierarchyCheck:
3044 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003045 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003046 ? LocationSummary::kCallOnSlowPath
3047 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3048 break;
3049 case TypeCheckKind::kArrayCheck:
3050 case TypeCheckKind::kUnresolvedCheck:
3051 case TypeCheckKind::kInterfaceCheck:
3052 call_kind = LocationSummary::kCallOnSlowPath;
3053 break;
3054 }
3055
3056 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003057 locations->SetInAt(0, Location::RequiresRegister());
3058 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003059 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003060}
3061
// Emits the runtime type check for a checkcast. Depending on the statically
// known kind of check this is an exact class compare, a walk up the
// superclass chain, a component-type inspection for arrays, or a linear scan
// of the interface table; all failures branch to a TypeCheckSlowPathMIPS.
void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Location temp_loc = locations->GetTemp(0);
  Register temp = temp_loc.AsRegister<Register>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // Second temp exists only when read barriers require it.
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  // Field offsets inside Object/Class/Array used by the checks below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  MipsLabel done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                         is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null: a null reference always
  // passes a checkcast.
  if (instruction->MustDoNullCheck()) {
    __ Beqz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bne(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      MipsLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqz(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bne(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      MipsLabel loop;
      __ Bind(&loop);
      __ Beq(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnez(temp, &loop);
      __ B(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beq(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqz(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnez(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ B(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      // TMP = number of remaining iftable entries (counted down by 2 below).
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      MipsLabel loop;
      __ Bind(&loop);
      // Advance to the next (class, method-array) pair up front.
      __ Addiu(temp, temp, 2 * kHeapReferenceSize);  // Possibly in delay slot on R2.
      __ Beqz(TMP, slow_path->GetEntryLabel());
      // Load the interface class of the pair just stepped over.
      __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bne(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
3248
3249void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3250 LocationSummary* locations =
3251 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3252 locations->SetInAt(0, Location::RequiresRegister());
3253 if (check->HasUses()) {
3254 locations->SetOut(Location::SameAsFirstInput());
3255 }
3256}
3257
3258void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
3259 // We assume the class is not null.
3260 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
3261 check->GetLoadClass(),
3262 check,
3263 check->GetDexPc(),
3264 true);
3265 codegen_->AddSlowPath(slow_path);
3266 GenerateClassInitializationCheck(slow_path,
3267 check->GetLocations()->InAt(0).AsRegister<Register>());
3268}
3269
// Allocates locations for HCompare: core registers for integral inputs, FPU
// registers for float/double inputs; the -1/0/1 result always lands in a core
// register.
void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);

  switch (in_type) {
    // All 32-bit (and narrower) integral types share one register scheme.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // Output overlaps because it is written before doing the low comparison.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}
3305
void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
  // Materializes the three-way comparison result into `res`:
  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
  // For float/double inputs, an unordered comparison (either operand NaN)
  // produces 1 when the HCompare has gt bias and -1 otherwise.
  LocationSummary* locations = instruction->GetLocations();
  Register res = locations->Out().AsRegister<Register>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      // res = (lhs > rhs) - (lhs < rhs), i.e. -1/0/1.
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }
    case Primitive::kPrimLong: {
      // Compare the high words (signed); only if they are equal fall through
      // to an unsigned comparison of the low words.
      MipsLabel done;
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
      // TODO: more efficient (direct) comparison with a constant.
      __ Slt(TMP, lhs_high, rhs_high);
      __ Slt(AT, rhs_high, lhs_high);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);           // Result -1:1:0 for [ <, >, == ].
      __ Bnez(res, &done);             // If we compared ==, check if lower bits are also equal.
      __ Sltu(TMP, lhs_low, rhs_low);
      __ Sltu(AT, rhs_low, lhs_low);   // Inverted: is actually gt.
      __ Subu(res, AT, TMP);           // Result -1:1:0 for [ <, >, == ].
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimFloat: {
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        // R6: compare instructions write an all-ones/all-zeros mask into an
        // FPU register (FTMP), tested with Bc1nez.
        __ CmpEqS(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          // Neither CmpEq nor CmpLt is true on NaN, so res falls through to 1.
          __ CmpLtS(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          // NaN falls through to -1 here.
          __ CmpLtS(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        // Pre-R6: c.cond.fmt-style compares set FP condition-code flag 0,
        // tested with Bc1t/Movt.
        if (gt_bias) {
          __ ColtS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);  // res = 0 when equal; stays 1 on NaN.
        } else {
          __ ColtS(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);  // res = 0 when equal; stays -1 on NaN.
        }
      }
      __ Bind(&done);
      break;
    }
    case Primitive::kPrimDouble: {
      // Same structure as the float case, using the double-precision compares.
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        __ CmpEqD(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        if (gt_bias) {
          __ ColtD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);
        } else {
          __ ColtD(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);
        }
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3431
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003432void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003433 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003434 switch (instruction->InputAt(0)->GetType()) {
3435 default:
3436 case Primitive::kPrimLong:
3437 locations->SetInAt(0, Location::RequiresRegister());
3438 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3439 break;
3440
3441 case Primitive::kPrimFloat:
3442 case Primitive::kPrimDouble:
3443 locations->SetInAt(0, Location::RequiresFpuRegister());
3444 locations->SetInAt(1, Location::RequiresFpuRegister());
3445 break;
3446 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003447 if (!instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003448 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3449 }
3450}
3451
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003452void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003453 if (instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003454 return;
3455 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003456
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003457 Primitive::Type type = instruction->InputAt(0)->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003458 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003459
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003460 switch (type) {
3461 default:
3462 // Integer case.
3463 GenerateIntCompare(instruction->GetCondition(), locations);
3464 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003465
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003466 case Primitive::kPrimLong:
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01003467 GenerateLongCompare(instruction->GetCondition(), locations);
3468 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003469
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003470 case Primitive::kPrimFloat:
3471 case Primitive::kPrimDouble:
Alexey Frunze2ddb7172016-09-06 17:04:55 -07003472 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3473 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003474 }
3475}
3476
Alexey Frunze7e99e052015-11-24 19:28:01 -08003477void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3478 DCHECK(instruction->IsDiv() || instruction->IsRem());
3479 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3480
3481 LocationSummary* locations = instruction->GetLocations();
3482 Location second = locations->InAt(1);
3483 DCHECK(second.IsConstant());
3484
3485 Register out = locations->Out().AsRegister<Register>();
3486 Register dividend = locations->InAt(0).AsRegister<Register>();
3487 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3488 DCHECK(imm == 1 || imm == -1);
3489
3490 if (instruction->IsRem()) {
3491 __ Move(out, ZERO);
3492 } else {
3493 if (imm == -1) {
3494 __ Subu(out, ZERO, dividend);
3495 } else if (out != dividend) {
3496 __ Move(out, dividend);
3497 }
3498 }
3499}
3500
void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  // Emits a 32-bit div/rem by a constant whose magnitude is a power of two
  // (2^ctz_imm) using shifts instead of a divide instruction. A bias of
  // (2^ctz_imm - 1) is added to negative dividends before the arithmetic
  // shift so the result rounds towards zero, as Java division requires.
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  // |imm| as unsigned; presumably AbsOrMin keeps INT32_MIN representable --
  // its magnitude only fits in the unsigned type.
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (ctz_imm == 1) {
      // Fast path for division by +/-2, which is very common.
      __ Srl(TMP, dividend, 31);
    } else {
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
    }
    // TMP now holds the rounding bias: 2^ctz_imm - 1 if the dividend is
    // negative, 0 otherwise.
    __ Addu(out, dividend, TMP);
    __ Sra(out, out, ctz_imm);
    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ Subu(out, ZERO, out);
    }
  } else {
    if (ctz_imm == 1) {
      // Fast path for modulo +/-2, which is very common.
      __ Sra(TMP, dividend, 31);
      __ Subu(out, dividend, TMP);
      __ Andi(out, out, 1);
      __ Addu(out, out, TMP);
    } else {
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
      __ Addu(out, dividend, TMP);
      // Keep only the low ctz_imm bits: Andi when the mask fits in its
      // 16-bit immediate, otherwise isolate them with a shift pair.
      if (IsUint<16>(abs_imm - 1)) {
        __ Andi(out, out, abs_imm - 1);
      } else {
        __ Sll(out, out, 32 - ctz_imm);
        __ Srl(out, out, 32 - ctz_imm);
      }
      // Remove the bias again so the remainder carries the dividend's sign.
      __ Subu(out, out, TMP);
    }
  }
}
3549
void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  // Emits a 32-bit div/rem by an arbitrary non-trivial constant using the
  // magic-number multiplication technique (Granlund-Montgomery, see
  // Hacker's Delight ch. 10): the quotient is derived from the high 32 bits
  // of dividend * magic, adjusted and arithmetically shifted.
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // TMP = high 32 bits of dividend * magic.
  __ LoadConst32(TMP, magic);
  if (isR6) {
    __ MuhR6(TMP, dividend, TMP);
  } else {
    __ MultR2(dividend, TMP);
    __ Mfhi(TMP);
  }
  // Correct for the sign mismatch between the divisor and the magic constant.
  if (imm > 0 && magic < 0) {
    __ Addu(TMP, TMP, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Subu(TMP, TMP, dividend);
  }

  if (shift != 0) {
    __ Sra(TMP, TMP, shift);
  }

  if (instruction->IsDiv()) {
    // quotient = TMP + (TMP >>> 31): Sra yields 0 or -1 in `out`, and
    // subtracting it adds one when TMP is negative (round towards zero).
    __ Sra(out, TMP, 31);
    __ Subu(out, TMP, out);
  } else {
    // remainder = dividend - quotient * imm, with the quotient computed in
    // AT exactly as in the div case above.
    __ Sra(AT, TMP, 31);
    __ Subu(AT, TMP, AT);
    __ LoadConst32(TMP, imm);
    if (isR6) {
      __ MulR6(TMP, AT, TMP);
    } else {
      __ MulR2(TMP, AT, TMP);
    }
    __ Subu(out, dividend, TMP);
  }
}
3600
3601void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3602 DCHECK(instruction->IsDiv() || instruction->IsRem());
3603 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3604
3605 LocationSummary* locations = instruction->GetLocations();
3606 Register out = locations->Out().AsRegister<Register>();
3607 Location second = locations->InAt(1);
3608
3609 if (second.IsConstant()) {
3610 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3611 if (imm == 0) {
3612 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3613 } else if (imm == 1 || imm == -1) {
3614 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003615 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003616 DivRemByPowerOfTwo(instruction);
3617 } else {
3618 DCHECK(imm <= -2 || imm >= 2);
3619 GenerateDivRemWithAnyConstant(instruction);
3620 }
3621 } else {
3622 Register dividend = locations->InAt(0).AsRegister<Register>();
3623 Register divisor = second.AsRegister<Register>();
3624 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3625 if (instruction->IsDiv()) {
3626 if (isR6) {
3627 __ DivR6(out, dividend, divisor);
3628 } else {
3629 __ DivR2(out, dividend, divisor);
3630 }
3631 } else {
3632 if (isR6) {
3633 __ ModR6(out, dividend, divisor);
3634 } else {
3635 __ ModR2(out, dividend, divisor);
3636 }
3637 }
3638 }
3639}
3640
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003641void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
3642 Primitive::Type type = div->GetResultType();
3643 LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003644 ? LocationSummary::kCallOnMainOnly
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003645 : LocationSummary::kNoCall;
3646
3647 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3648
3649 switch (type) {
3650 case Primitive::kPrimInt:
3651 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08003652 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003653 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3654 break;
3655
3656 case Primitive::kPrimLong: {
3657 InvokeRuntimeCallingConvention calling_convention;
3658 locations->SetInAt(0, Location::RegisterPairLocation(
3659 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3660 locations->SetInAt(1, Location::RegisterPairLocation(
3661 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3662 locations->SetOut(calling_convention.GetReturnLocation(type));
3663 break;
3664 }
3665
3666 case Primitive::kPrimFloat:
3667 case Primitive::kPrimDouble:
3668 locations->SetInAt(0, Location::RequiresFpuRegister());
3669 locations->SetInAt(1, Location::RequiresFpuRegister());
3670 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3671 break;
3672
3673 default:
3674 LOG(FATAL) << "Unexpected div type " << type;
3675 }
3676}
3677
3678void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
3679 Primitive::Type type = instruction->GetType();
3680 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003681
3682 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003683 case Primitive::kPrimInt:
3684 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003685 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003686 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01003687 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003688 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
3689 break;
3690 }
3691 case Primitive::kPrimFloat:
3692 case Primitive::kPrimDouble: {
3693 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
3694 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3695 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3696 if (type == Primitive::kPrimFloat) {
3697 __ DivS(dst, lhs, rhs);
3698 } else {
3699 __ DivD(dst, lhs, rhs);
3700 }
3701 break;
3702 }
3703 default:
3704 LOG(FATAL) << "Unexpected div type " << type;
3705 }
3706}
3707
3708void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003709 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003710 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003711}
3712
void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // Branches to a throwing slow path when the divisor is zero. A constant
  // divisor is resolved at compile time: zero branches unconditionally to
  // the slow path, non-zero emits no check at all.
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-null constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegister()) << value;
        __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-null constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegisterPair()) << value;
        // A 64-bit value is zero iff the OR of its two halves is zero.
        __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
        __ Beqz(TMP, slow_path->GetEntryLabel());
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
  }
}
3757
3758void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
3759 LocationSummary* locations =
3760 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3761 locations->SetOut(Location::ConstantLocation(constant));
3762}
3763
void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Intentionally empty: the constant is materialized at each use site.
}
3767
void LocationsBuilderMIPS::VisitExit(HExit* exit) {
  // The exit block needs no register locations.
  exit->SetLocations(nullptr);
}
3771
void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // Intentionally empty: no code is emitted for the exit block itself.
}
3774
3775void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
3776 LocationSummary* locations =
3777 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3778 locations->SetOut(Location::ConstantLocation(constant));
3779}
3780
void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Intentionally empty: the constant is materialized at each use site.
}
3784
void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
  // An unconditional jump has no inputs or outputs.
  got->SetLocations(nullptr);
}
3788
3789void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
3790 DCHECK(!successor->IsExitBlock());
3791 HBasicBlock* block = got->GetBlock();
3792 HInstruction* previous = got->GetPrevious();
3793 HLoopInformation* info = block->GetLoopInformation();
3794
3795 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3796 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3797 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3798 return;
3799 }
3800 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3801 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3802 }
3803 if (!codegen_->GoesToNextBlock(block, successor)) {
3804 __ B(codegen_->GetLabelOf(successor));
3805 }
3806}
3807
void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
  // Delegate to the shared goto handler with the block's sole successor.
  HandleGoto(got, got->GetSuccessor());
}
3811
void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  // A try boundary has no inputs or outputs.
  try_boundary->SetLocations(nullptr);
}
3815
3816void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3817 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3818 if (!successor->IsExitBlock()) {
3819 HandleGoto(try_boundary, successor);
3820 }
3821}
3822
void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
                                                      LocationSummary* locations) {
  // Materializes the boolean result of an integer condition into `dst`,
  // preferring immediate instruction forms (slti/sltiu/xori/addiu) whenever
  // the constant RHS fits their 16-bit immediate fields.
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          // Compare directly against zero.
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          // dst = lhs - rhs is zero iff lhs == rhs.
          __ Addiu(dst, lhs, -rhs_imm);
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        // dst = lhs ^ rhs is zero iff lhs == rhs.
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst32(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3968
bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
                                                         LocationSummary* input_locations,
                                                         Register dst) {
  // Materializes an integer condition into `dst` using the cheapest sequence
  // available, which may compute the NEGATED condition instead (in the
  // zero/non-zero sense). Returns true when `dst` holds the negation; the
  // caller is responsible for testing the opposite polarity in that case.
  Register lhs = input_locations->InAt(0).AsRegister<Register>();
  Location rhs_location = input_locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // dst is zero iff lhs == rhs, i.e. dst holds the truth value of NE.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        __ Addiu(dst, lhs, -rhs_imm);
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      // dst holds the truth value of LT.
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        return (cond == kCondGT);
      } else {
        // dst holds the truth value of GT (rhs < lhs).
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      // dst holds the truth value of B (unsigned less-than).
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        return (cond == kCondA);
      } else {
        // dst holds the truth value of A (unsigned greater-than).
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
4066
void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
                                                               LocationSummary* locations,
                                                               MipsLabel* label) {
  // Emits a conditional branch to `label` for `cond` applied to 32-bit
  // integer inputs. A zero constant RHS maps to single compare-with-zero
  // branches; on R2, other constants fitting slti/sltiu immediates avoid
  // materializing the RHS in a register.
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  if (use_imm && rhs_imm == 0) {
    // Compare-with-zero: dedicated single-instruction branches.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqz(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnez(lhs, label);
        break;
      case kCondLT:
        __ Bltz(lhs, label);
        break;
      case kCondGE:
        __ Bgez(lhs, label);
        break;
      case kCondLE:
        __ Blez(lhs, label);
        break;
      case kCondGT:
        __ Bgtz(lhs, label);
        break;
      case kCondB:  // always false (unsigned < 0 is impossible)
        break;
      case kCondAE:  // always true (unsigned >= 0 always holds)
        __ B(label);
        break;
    }
  } else {
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (isR6 || !use_imm) {
      // Register-register form (on R6, constants are simply materialized).
      if (use_imm) {
        rhs_reg = TMP;
        __ LoadConst32(rhs_reg, rhs_imm);
      }
      switch (cond) {
        case kCondEQ:
          __ Beq(lhs, rhs_reg, label);
          break;
        case kCondNE:
          __ Bne(lhs, rhs_reg, label);
          break;
        case kCondLT:
          __ Blt(lhs, rhs_reg, label);
          break;
        case kCondGE:
          __ Bge(lhs, rhs_reg, label);
          break;
        case kCondLE:
          __ Bge(rhs_reg, lhs, label);
          break;
        case kCondGT:
          __ Blt(rhs_reg, lhs, label);
          break;
        case kCondB:
          __ Bltu(lhs, rhs_reg, label);
          break;
        case kCondAE:
          __ Bgeu(lhs, rhs_reg, label);
          break;
        case kCondBE:
          __ Bgeu(rhs_reg, lhs, label);
          break;
        case kCondA:
          __ Bltu(rhs_reg, lhs, label);
          break;
      }
    } else {
      // Special cases for more efficient comparison with constants on R2:
      // set TMP with slti/sltiu and branch on it when the immediate fits.
      switch (cond) {
        case kCondEQ:
          __ LoadConst32(TMP, rhs_imm);
          __ Beq(lhs, TMP, label);
          break;
        case kCondNE:
          __ LoadConst32(TMP, rhs_imm);
          __ Bne(lhs, TMP, label);
          break;
        case kCondLT:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(lhs, TMP, label);
          }
          break;
        case kCondGE:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(lhs, TMP, label);
          }
          break;
        case kCondLE:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(TMP, lhs, label);
          }
          break;
        case kCondGT:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(TMP, lhs, label);
          }
          break;
        case kCondB:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(lhs, TMP, label);
          }
          break;
        case kCondAE:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(lhs, TMP, label);
          }
          break;
        case kCondBE:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(TMP, lhs, label);
          }
          break;
        case kCondA:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(TMP, lhs, label);
          }
          break;
      }
    }
  }
}
4243
// Materializes the boolean result (0 or 1) of the 64-bit comparison
// `lhs <cond> rhs` into the core register `dst`.
//
// The long operands live in 32-bit register pairs (high/low words). The
// right-hand side may be a constant, in which case its two halves are loaded
// into the scratch registers TMP/AT as needed. Clobbers TMP and AT.
//
// The general lowering for a signed "less than" is:
//   lhs < rhs  iff  (lhs_high < rhs_high)
//               ||  (lhs_high not greater than rhs_high  &&  lhs_low <u rhs_low)
// realized with Slt/Sltu, where `Slt(x, TMP, AT)` with 0/1 inputs acts as
// "TMP == 0 && AT == 1". Unsigned variants use Sltu on the high words too.
// The inverse conditions (GE/LE/AE/BE) are computed as the negation (Xori 1)
// of their counterparts.
void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
                                                       LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }
  if (use_imm && imm == 0) {
    // Comparisons against the constant 0 use short special-case sequences.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        // dst = ((lhs_high | lhs_low) == 0).
        __ Or(dst, lhs_high, lhs_low);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        // dst = ((lhs_high | lhs_low) != 0).
        __ Or(dst, lhs_high, lhs_low);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
        // lhs < 0 iff the sign bit of the high word is set.
        __ Slt(dst, lhs_high, ZERO);
        break;
      case kCondGE:
        // lhs >= 0 is the negation of lhs < 0.
        __ Slt(dst, lhs_high, ZERO);
        __ Xori(dst, dst, 1);
        break;
      case kCondLE:
        // AT is the sign mask (0xFFFFFFFF if negative, else 0); AT <u (high|low)
        // holds exactly when lhs > 0, so LE is its negation.
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        __ Xori(dst, dst, 1);
        break;
      case kCondGT:
        // Same trick as LE without the final negation: dst = (lhs > 0).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        break;
      case kCondB:  // always false
        // Unsigned "< 0" never holds; clear dst.
        __ Andi(dst, dst, 0);
        break;
      case kCondAE:  // always true
        // Unsigned ">= 0" always holds; set dst to 1.
        __ Ori(dst, ZERO, 1);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        // dst = ((lhs ^ imm) == 0), combining both word differences with Or.
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        // dst = ((lhs ^ imm) != 0).
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        // Signed lhs < imm. When dst aliases lhs_low the low-word compare is
        // done first so the aliased input isn't clobbered before it is read.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, lhs_high, TMP);   // AT = (lhs_high < imm_high).
        __ Slt(TMP, TMP, lhs_high);  // TMP = (lhs_high > imm_high).
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        // dst = (high words not greater) && (low-word <u): TMP==0 && dst==1.
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondGE) {
          // GE is !LT.
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        // Signed lhs > imm (mirror of LT with operands swapped); LE is !GT.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, TMP, lhs_high);   // AT = (lhs_high > imm_high).
        __ Slt(TMP, lhs_high, TMP);  // TMP = (lhs_high < imm_high).
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        // Unsigned lhs <u imm: as LT but with Sltu on the high words; AE is !B.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, lhs_high, TMP);
        __ Sltu(TMP, TMP, lhs_high);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        // Unsigned lhs >u imm; BE is !A.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, TMP, lhs_high);
        __ Sltu(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  } else {
    // Register-register comparison: same lowering as the immediate cases,
    // without the constant loads and without aliasing concerns for dst.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        __ Slt(TMP, rhs_high, lhs_high);  // TMP = (lhs_high > rhs_high).
        __ Sltu(AT, lhs_low, rhs_low);    // AT = (lhs_low <u rhs_low).
        __ Slt(TMP, TMP, AT);             // High words not greater && low less.
        __ Slt(AT, lhs_high, rhs_high);   // AT = (lhs_high < rhs_high).
        __ Or(dst, AT, TMP);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  }
}
4458
// Emits a conditional branch to `label` taken iff the 64-bit comparison
// `lhs <cond> rhs` holds. Operands are 32-bit register pairs; the right-hand
// side may be a constant (its halves loaded into TMP/AT). Clobbers TMP and AT.
//
// For ordering conditions, the lowering branches early on the decisive
// high-word comparison and then resolves the tie (equal high words) with an
// unsigned low-word comparison:
//   branch if lhs_high <cond> rhs_high
//   otherwise branch if (high words tie-compatible) && (low words decide).
// The "tie" test uses the `Slt/Blt(TMP, AT, ...)` pattern on 0/1 values,
// which holds only when TMP == 0 && AT == 1.
void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
                                                                LocationSummary* locations,
                                                                MipsLabel* label) {
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }

  if (use_imm && imm == 0) {
    // Comparisons against the constant 0 use short special-case sequences.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        // Negative iff the high word's sign bit is set.
        __ Bltz(lhs_high, label);
        break;
      case kCondGE:
        __ Bgez(lhs_high, label);
        break;
      case kCondLE:
        // AT = sign mask; AT <u (high|low) holds iff lhs > 0, so branch on
        // the unsigned >= (i.e. !(lhs > 0)).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bgeu(AT, TMP, label);
        break;
      case kCondGT:
        // Branch iff lhs > 0 (see LE above).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bltu(AT, TMP, label);
        break;
      case kCondB:  // always false
        // Unsigned "< 0" never holds: emit nothing.
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        // Branch iff (lhs ^ imm) == 0.
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        // Branch if lhs_high < imm_high; else branch if high words are equal
        // (TMP = "high greater" is 0) and lhs_low <u imm_low.
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        // GE: branch if high strictly greater; else require "high not less"
        // and "low not below" (both flags zero).
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        // Unsigned variants mirror LT/GE/LE/GT with Bltu/Sltu on high words.
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  } else {
    // Register-register comparison: same branch structure as the immediate
    // cases, without the constant loads.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  }
}
4671
// Materializes the boolean result (0 or 1) of the floating-point comparison
// `lhs <cond> rhs` into the core register `dst`.
//
// Two encodings are emitted depending on the ISA revision:
//  - R6: CMP.cond.fmt writes an all-ones/all-zeros mask into FTMP, which is
//    moved to `dst` with Mfc1 and reduced to 0/1 with `Andi dst, dst, 1`
//    (for NE, `Addiu dst, dst, 1` maps -1 -> 0 and 0 -> 1, i.e. negates).
//  - pre-R6: C.cond.fmt sets FP condition code 0, and dst is materialized
//    with LoadConst32(dst, 1) followed by a conditional move of ZERO
//    (Movf clears dst when the cc is false; Movt clears it when true,
//    which yields the negated result for NE).
//
// `gt_bias` selects between the ordered and unordered compare variants so
// that an unordered (NaN) operand produces the appropriate result for the
// comparison's bias. Clobbers FTMP on R6 and FP condition code 0 on pre-R6.
void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
                                                     bool gt_bias,
                                                     Primitive::Type type,
                                                     LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          // -1 (equal) + 1 -> 0; 0 (not equal) + 1 -> 1.
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          // GT/GE are emitted as the swapped-operand LT/LE with the
          // opposite ordered/unordered choice.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          // Movt clears dst when the cc is true, negating the equality.
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  } else {
    // Double-precision: identical structure with the .D compare variants.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  }
}
4895
// Emits a pre-R6 floating-point comparison for `lhs <cond> rhs` into FP
// condition code `cc` (C.cond.fmt family).
//
// Returns true when the emitted comparison is the NEGATION of the requested
// condition (only kCondNE, which is emitted as an equality compare), meaning
// the consumer must act on the condition code being FALSE; returns false
// when the consumer acts on it being TRUE.
//
// `gt_bias` selects ordered vs unordered compare variants to get the desired
// NaN behavior; GT/GE are emitted as swapped-operand LT/LE.
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          int cc) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CeqS(cc, lhs, rhs);
        return false;
      case kCondNE:
        // NE is the negated equality compare.
        __ CeqS(cc, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ ColtS(cc, lhs, rhs);
        } else {
          __ CultS(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeS(cc, lhs, rhs);
        } else {
          __ CuleS(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CultS(cc, rhs, lhs);
        } else {
          __ ColtS(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleS(cc, rhs, lhs);
        } else {
          __ ColeS(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    // Double-precision: same structure with the .D compare variants.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CeqD(cc, lhs, rhs);
        return false;
      case kCondNE:
        __ CeqD(cc, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ ColtD(cc, lhs, rhs);
        } else {
          __ CultD(cc, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ ColeD(cc, lhs, rhs);
        } else {
          __ CuleD(cc, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CultD(cc, rhs, lhs);
        } else {
          __ ColtD(cc, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CuleD(cc, rhs, lhs);
        } else {
          __ ColeD(cc, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4987
// Emits an R6 floating-point comparison for `lhs <cond> rhs` into the FPU
// register `dst` (CMP.cond.fmt family, producing an all-ones/all-zeros mask).
//
// Returns true when the emitted comparison is the NEGATION of the requested
// condition (only kCondNE, which is emitted as an equality compare), meaning
// the consumer must act on the mask being zero; returns false otherwise.
//
// `gt_bias` selects ordered vs unordered compare variants to get the desired
// NaN behavior; GT/GE are emitted as swapped-operand LT/LE.
bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          FRegister dst) {
  FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
  CHECK(codegen_->GetInstructionSetFeatures().IsR6());
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // NE is the negated equality compare.
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    // Double-precision: same structure with the .D compare variants.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
5079
// Emits a conditional branch to `label` taken iff the floating-point
// comparison `lhs <cond> rhs` holds.
//
// On R6 the comparison mask is produced in FTMP (CMP.cond.fmt) and tested
// with Bc1nez (branch if true) or Bc1eqz (branch if false — used to negate
// the equality compare for NE). On pre-R6, FP condition code 0 is set with
// C.cond.fmt and tested with Bc1t/Bc1f.
//
// `gt_bias` selects ordered vs unordered compare variants for the desired
// NaN behavior; GT/GE are emitted as swapped-operand LT/LE. Clobbers FTMP
// on R6 and FP condition code 0 on pre-R6.
void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
                                                              bool gt_bias,
                                                              Primitive::Type type,
                                                              LocationSummary* locations,
                                                              MipsLabel* label) {
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          // Branch when the equality compare is false.
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          // Branch when condition code 0 is false (negated equality).
          __ CeqS(0, lhs, rhs);
          __ Bc1f(0, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  } else {
    // Double-precision: identical structure with the .D compare variants.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ Bc1f(0, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  }
}
5279
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005280void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00005281 size_t condition_input_index,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005282 MipsLabel* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00005283 MipsLabel* false_target) {
5284 HInstruction* cond = instruction->InputAt(condition_input_index);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005285
David Brazdil0debae72015-11-12 18:37:00 +00005286 if (true_target == nullptr && false_target == nullptr) {
5287 // Nothing to do. The code always falls through.
5288 return;
5289 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00005290 // Constant condition, statically compared against "true" (integer value 1).
5291 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00005292 if (true_target != nullptr) {
5293 __ B(true_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005294 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005295 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00005296 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00005297 if (false_target != nullptr) {
5298 __ B(false_target);
5299 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005300 }
David Brazdil0debae72015-11-12 18:37:00 +00005301 return;
5302 }
5303
5304 // The following code generates these patterns:
5305 // (1) true_target == nullptr && false_target != nullptr
5306 // - opposite condition true => branch to false_target
5307 // (2) true_target != nullptr && false_target == nullptr
5308 // - condition true => branch to true_target
5309 // (3) true_target != nullptr && false_target != nullptr
5310 // - condition true => branch to true_target
5311 // - branch to false_target
5312 if (IsBooleanValueOrMaterializedCondition(cond)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005313 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00005314 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005315 DCHECK(cond_val.IsRegister());
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005316 if (true_target == nullptr) {
David Brazdil0debae72015-11-12 18:37:00 +00005317 __ Beqz(cond_val.AsRegister<Register>(), false_target);
5318 } else {
5319 __ Bnez(cond_val.AsRegister<Register>(), true_target);
5320 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005321 } else {
5322 // The condition instruction has not been materialized, use its inputs as
5323 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00005324 HCondition* condition = cond->AsCondition();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005325 Primitive::Type type = condition->InputAt(0)->GetType();
5326 LocationSummary* locations = cond->GetLocations();
5327 IfCondition if_cond = condition->GetCondition();
5328 MipsLabel* branch_target = true_target;
David Brazdil0debae72015-11-12 18:37:00 +00005329
David Brazdil0debae72015-11-12 18:37:00 +00005330 if (true_target == nullptr) {
5331 if_cond = condition->GetOppositeCondition();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005332 branch_target = false_target;
David Brazdil0debae72015-11-12 18:37:00 +00005333 }
5334
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005335 switch (type) {
5336 default:
5337 GenerateIntCompareAndBranch(if_cond, locations, branch_target);
5338 break;
5339 case Primitive::kPrimLong:
5340 GenerateLongCompareAndBranch(if_cond, locations, branch_target);
5341 break;
5342 case Primitive::kPrimFloat:
5343 case Primitive::kPrimDouble:
5344 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
5345 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005346 }
5347 }
David Brazdil0debae72015-11-12 18:37:00 +00005348
5349 // If neither branch falls through (case 3), the conditional branch to `true_target`
5350 // was already emitted (case 2) and we need to emit a jump to `false_target`.
5351 if (true_target != nullptr && false_target != nullptr) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005352 __ B(false_target);
5353 }
5354}
5355
5356void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5357 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00005358 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005359 locations->SetInAt(0, Location::RequiresRegister());
5360 }
5361}
5362
5363void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00005364 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5365 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5366 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5367 nullptr : codegen_->GetLabelOf(true_successor);
5368 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5369 nullptr : codegen_->GetLabelOf(false_successor);
5370 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005371}
5372
5373void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
5374 LocationSummary* locations = new (GetGraph()->GetArena())
5375 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01005376 InvokeRuntimeCallingConvention calling_convention;
5377 RegisterSet caller_saves = RegisterSet::Empty();
5378 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5379 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00005380 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005381 locations->SetInAt(0, Location::RequiresRegister());
5382 }
5383}
5384
5385void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005386 SlowPathCodeMIPS* slow_path =
5387 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005388 GenerateTestAndBranch(deoptimize,
5389 /* condition_input_index */ 0,
5390 slow_path->GetEntryLabel(),
5391 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005392}
5393
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
//
// Feasibility is decided per (condition type, destination type, ISA revision)
// triple; the inline pseudo-assembly comments below show the sequence that
// GenConditionalMoveR2/R6 would emit for each accepted case.
static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  // `condition` is only valid (non-null) when the condition is not a
  // materialized boolean value; guarded by `materialized` below.
  HCondition* condition = cond->AsCondition();

  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  // Zero-valued constant inputs can come from the ZERO register instead of
  // occupying a general-purpose register.
  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  if (!cond->IsConstant()) {
    switch (cond_type) {
      default:
        switch (dst_type) {
          default:
            // Moving int on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg, false_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg, true_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                // selnez AT, true_reg, cond_reg
                // seleqz TMP, false_reg, cond_reg
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movn out_reg, true_reg/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg_lo, false_reg_lo, cond_reg
                // seleqz out_reg_hi, false_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg_lo, true_reg_lo, cond_reg
                // selnez out_reg_hi, true_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
              // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on int condition.
            if (is_r6) {
              if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                can_move_conditionally = true;
                if (is_true_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // seleqz.fmt out_reg, false_reg, temp_cond_reg
                  use_const_for_true_in = true;
                } else if (is_false_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // selnez.fmt out_reg, true_reg, temp_cond_reg
                  use_const_for_false_in = true;
                } else {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // sel.fmt temp_cond_reg, false_reg, true_reg
                  // mov.fmt out_reg, temp_cond_reg
                }
              }
            } else {
              // movn.fmt out_reg, true_reg, cond_reg
              can_move_conditionally = true;
            }
            break;
        }
        break;
      case Primitive::kPrimLong:
        // We don't materialize long comparison now
        // and use conditional branches instead.
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        switch (dst_type) {
          default:
            // Moving int on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg, false_reg, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg, true_reg, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else {
                // mfc1 TMP, temp_cond_reg
                // selnez AT, true_reg, TMP
                // seleqz TMP, false_reg, TMP
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movt out_reg, true_reg/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg_lo, false_reg_lo, TMP
                // seleqz out_reg_hi, false_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg_lo, true_reg_lo, TMP
                // selnez out_reg_hi, true_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movt out_reg_lo, true_reg_lo/ZERO, cc
              // movt out_reg_hi, true_reg_hi/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on float/double condition.
            if (is_r6) {
              can_move_conditionally = true;
              if (is_true_value_zero_constant) {
                // seleqz.fmt out_reg, false_reg, temp_cond_reg
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez.fmt out_reg, true_reg, temp_cond_reg
                use_const_for_false_in = true;
              } else {
                // sel.fmt temp_cond_reg, false_reg, true_reg
                // mov.fmt out_reg, temp_cond_reg
              }
            } else {
              // movt.fmt out_reg, true_reg, cc
              can_move_conditionally = true;
            }
            break;
        }
        break;
    }
  }

  // At most one of the inputs may be replaced by a zero constant.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    // Input 0 is the false value, input 1 the true value, input 2 the
    // (materialized) condition.
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }
    // On R6 we don't require the output to be the same as the
    // first input for conditional moves unlike on R2.
    bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
    if (is_out_same_as_first_in) {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    } else {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    }
  }

  return can_move_conditionally;
}
5630
// Emits a pre-R6 (MOVZ/MOVN/MOVT/MOVF) conditional-move sequence for an
// HSelect whose feasibility was already established by CanMoveConditionally().
// On R2 the output is the same register as the false input (input 0), so only
// the true value (input 1) is conditionally moved over it.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location src = locations->InAt(1);
  // ZERO stands in for a zero-constant true value (see CanMoveConditionally).
  Register src_reg = ZERO;
  Register src_reg_high = ZERO;
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  // FP compares on R2 set a condition-code flag rather than a register.
  int cond_cc = 0;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already a 0/1 value in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the comparison into cond_reg (int) or cond_cc (FP). The
    // helpers may compute the inverse condition when that is cheaper and
    // report it via `cond_inverted`.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR2(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               cond_cc);
        break;
    }
  }

  // R2 conditional moves require out == first (false) input.
  DCHECK(dst.Equals(locations->InAt(0)));
  if (src.IsRegister()) {
    src_reg = src.AsRegister<Register>();
  } else if (src.IsRegisterPair()) {
    src_reg = src.AsRegisterPairLow<Register>();
    src_reg_high = src.AsRegisterPairHigh<Register>();
  } else if (src.IsConstant()) {
    // Constant true values are restricted to zero; ZERO is used as the source.
    DCHECK(src.GetConstant()->IsZeroBitPattern());
  }

  switch (cond_type) {
    default:
      // Integer condition in cond_reg: use MOVZ/MOVN.
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
          } else {
            __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          } else {
            __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
      }
      break;
    case Primitive::kPrimLong:
      // Long comparisons are never materialized (see CanMoveConditionally).
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // FP condition in condition-code flag cond_cc: use MOVT/MOVF.
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
          } else {
            __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          } else {
            __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
      }
      break;
  }
}
5752
// Emits an R6 (SELEQZ/SELNEZ/SEL.fmt) conditional-move sequence for an
// HSelect whose feasibility was already established by CanMoveConditionally().
// Unlike R2, the output register need not alias either input; at most one of
// the two value inputs may be a zero constant (then the ZERO register / the
// select-zero semantics supply it implicitly).
void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  // R6 FP compares produce their result in an FPU register.
  FRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already a 0/1 value in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the comparison into cond_reg (int) or fcond_reg (FP). The
    // helpers may compute the inverse condition and report it via
    // `cond_inverted`, which swaps seleqz/selnez (or the sel.fmt operands).
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR6(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               fcond_reg);
        break;
    }
  }

  // Constant inputs are restricted to zero (see CanMoveConditionally).
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer destination.
      if (Primitive::IsFloatingPointType(cond_type)) {
        // Move the FP compare result into a core register for seleqz/selnez.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        }
      } else {
        // General case: combine the two one-sided selects with OR.
        // AT/TMP are scratch; cond_reg must not alias AT.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
        }
        __ Or(dst.AsRegister<Register>(), AT, TMP);
      }
      break;
    case Primitive::kPrimLong: {
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      Register dst_lo = dst.AsRegisterPairLow<Register>();
      Register dst_hi = dst.AsRegisterPairHigh<Register>();
      // Exactly one of the long inputs is a zero constant here; select the
      // non-constant pair or fall back to zero via select-zero semantics.
      if (true_src.IsConstant()) {
        Register src_lo = false_src.AsRegisterPairLow<Register>();
        Register src_hi = false_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        } else {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        }
      } else {
        DCHECK(false_src.IsConstant());
        Register src_lo = true_src.AsRegisterPairLow<Register>();
        Register src_hi = true_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        } else {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.fmt writes the selected value into the condition register,
        // hence the extra move into the destination.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.fmt writes the selected value into the condition register,
        // hence the extra move into the destination.
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
5926
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005927void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5928 LocationSummary* locations = new (GetGraph()->GetArena())
5929 LocationSummary(flag, LocationSummary::kNoCall);
5930 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07005931}
5932
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005933void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5934 __ LoadFromOffset(kLoadWord,
5935 flag->GetLocations()->Out().AsRegister<Register>(),
5936 SP,
5937 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07005938}
5939
David Brazdil74eb1b22015-12-14 11:44:01 +00005940void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
5941 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005942 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00005943}
5944
5945void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005946 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
5947 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
5948 if (is_r6) {
5949 GenConditionalMoveR6(select);
5950 } else {
5951 GenConditionalMoveR2(select);
5952 }
5953 } else {
5954 LocationSummary* locations = select->GetLocations();
5955 MipsLabel false_target;
5956 GenerateTestAndBranch(select,
5957 /* condition_input_index */ 2,
5958 /* true_target */ nullptr,
5959 &false_target);
5960 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
5961 __ Bind(&false_target);
5962 }
David Brazdil74eb1b22015-12-14 11:44:01 +00005963}
5964
// HNativeDebugInfo only marks a native PC for the debugger; it has no inputs
// or outputs, so an empty LocationSummary suffices.
void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}
5968
// Intentionally empty: the debug info is recorded elsewhere.
void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
5972
// Emits a single MIPS NOP instruction (used e.g. as a debuggable code marker).
void CodeGeneratorMIPS::GenerateNop() {
  __ Nop();
}
5976
// Sets up the LocationSummary for an instance/static field get.
// Wide (64-bit) volatile loads go through the A64Load runtime entrypoint
// (kCallOnMainOnly); object loads under read barriers may take a slow path.
// NOTE: the temp registration order below is relied upon by the matching
// HandleFieldGet() code generator (temp indices 0..2).
void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  // MIPS32 cannot load 64 bits atomically; volatile wide loads use the runtime.
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      generate_volatile
          ? LocationSummary::kCallOnMainOnly
          : (object_field_get_with_read_barrier
              ? LocationSummary::kCallOnSlowPath
              : LocationSummary::kNoCall));

  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetOut(Location::Any());
      // Need some temp core regs since FP results are returned in core registers
      Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
    }
  } else {
    if (Primitive::IsFloatingPointType(instruction->GetType())) {
      locations->SetOut(Location::RequiresFpuRegister());
    } else {
      // The output overlaps in the case of an object field get with
      // read barriers enabled: we do not want the move to overwrite the
      // object's location, as we need it to emit the read barrier.
      locations->SetOut(Location::RequiresRegister(),
                        object_field_get_with_read_barrier
                            ? Location::kOutputOverlap
                            : Location::kNoOutputOverlap);
    }
    if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
      // We need a temporary register for the read barrier marking slow
      // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
6028
// Generates code for an instance/static field get.
// - Wide volatile loads call the A64Load runtime entrypoint for atomicity.
// - Object loads honor read barriers (Baker fast path or generic slow path).
// - Other loads are plain LoadFromOffset/LoadSFromOffset/LoadDFromOffset with
//   an implicit null check folded in via `null_checker`.
void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Pick the load width/signedness matching the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile && load_type == kLoadDoubleword) {
    // 64-bit volatile load: delegate to the quick A64Load entrypoint, which
    // performs the access atomically. A0 (temp 0) carries base + offset.
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
    if (type == Primitive::kPrimDouble) {
      // FP results are returned in core registers. Need to move them.
      if (dst_loc.IsFpuRegister()) {
        __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
        __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                         dst_loc.AsFpuRegister<FRegister>());
      } else {
        DCHECK(dst_loc.IsDoubleStackSlot());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(1).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(2).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex() + 4);
      }
    }
  } else {
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp_loc = locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register dst;
      if (type == Primitive::kPrimLong) {
        DCHECK(dst_loc.IsRegisterPair());
        dst = dst_loc.AsRegisterPairLow<Register>();
      } else {
        DCHECK(dst_loc.IsRegister());
        dst = dst_loc.AsRegister<Register>();
      }
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    } else {
      DCHECK(dst_loc.IsFpuRegister());
      FRegister dst = dst_loc.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ LoadSFromOffset(dst, obj, offset, null_checker);
      } else {
        __ LoadDFromOffset(dst, obj, offset, null_checker);
      }
    }
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
6148
// Sets up the LocationSummary for an instance/static field set.
// Wide (64-bit) volatile stores go through the A64Store runtime entrypoint,
// so their value arrives in the runtime calling-convention registers.
// NOTE: the temp registration order is relied upon by HandleFieldSet().
void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  // MIPS32 cannot store 64 bits atomically; volatile wide stores use the runtime.
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetInAt(1, Location::Any());
      // Pass FP parameters in core registers.
      locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
      locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
    }
  } else {
    if (Primitive::IsFloatingPointType(field_type)) {
      locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
    }
  }
}
6179
// Emits the machine code for an instance/static field store.
// - Non-volatile stores (and volatile stores of <= 32 bits) use a single
//   store instruction with an implicit null check via `null_checker`.
// - Volatile 64-bit stores (long/double) are delegated to the kQuickA64Store
//   runtime entrypoint; FP values are marshalled into core registers to
//   follow the runtime calling convention. Presumably this is because MIPS32
//   has no single atomic 64-bit store here — TODO(review): confirm.
// - Reference stores mark the GC card; volatile stores are bracketed with
//   memory barriers (kAnyStore before, kAnyAny after).
6180void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
6181 const FieldInfo& field_info,
Goran Jakovljevice114da22016-12-26 14:21:43 +01006182 uint32_t dex_pc,
6183 bool value_can_be_null) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006184 Primitive::Type type = field_info.GetFieldType();
6185 LocationSummary* locations = instruction->GetLocations();
6186 Register obj = locations->InAt(0).AsRegister<Register>();
Alexey Frunzef58b2482016-09-02 22:14:06 -07006187 Location value_location = locations->InAt(1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006188 StoreOperandType store_type = kStoreByte;
6189 bool is_volatile = field_info.IsVolatile();
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006190 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Alexey Frunzec061de12017-02-14 13:27:23 -08006191 bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
Tijana Jakovljevic57433862017-01-17 16:59:03 +01006192 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006193
  // Select the store width from the field's primitive type.
6194 switch (type) {
6195 case Primitive::kPrimBoolean:
6196 case Primitive::kPrimByte:
6197 store_type = kStoreByte;
6198 break;
6199 case Primitive::kPrimShort:
6200 case Primitive::kPrimChar:
6201 store_type = kStoreHalfword;
6202 break;
6203 case Primitive::kPrimInt:
6204 case Primitive::kPrimFloat:
6205 case Primitive::kPrimNot:
6206 store_type = kStoreWord;
6207 break;
6208 case Primitive::kPrimLong:
6209 case Primitive::kPrimDouble:
6210 store_type = kStoreDoubleword;
6211 break;
6212 case Primitive::kPrimVoid:
6213 LOG(FATAL) << "Unreachable type " << type;
6214 UNREACHABLE();
6215 }
6216
  // Ordering barrier emitted before a volatile store.
6217 if (is_volatile) {
6218 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
6219 }
6220
  // Volatile 64-bit store: go through the runtime for an atomic store.
6221 if (is_volatile && store_type == kStoreDoubleword) {
6222 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006223 __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006224 // Do implicit Null check.
6225 __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
6226 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
6227 if (type == Primitive::kPrimDouble) {
6228 // Pass FP parameters in core registers.
Alexey Frunzef58b2482016-09-02 22:14:06 -07006229 if (value_location.IsFpuRegister()) {
6230 __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
6231 value_location.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006232 __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
Alexey Frunzef58b2482016-09-02 22:14:06 -07006233 value_location.AsFpuRegister<FRegister>());
6234 } else if (value_location.IsDoubleStackSlot()) {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006235 __ LoadFromOffset(kLoadWord,
6236 locations->GetTemp(1).AsRegister<Register>(),
6237 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006238 value_location.GetStackIndex());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006239 __ LoadFromOffset(kLoadWord,
6240 locations->GetTemp(2).AsRegister<Register>(),
6241 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006242 value_location.GetStackIndex() + 4);
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006243 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006244 DCHECK(value_location.IsConstant());
6245 DCHECK(value_location.GetConstant()->IsDoubleConstant());
6246 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006247 __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
6248 locations->GetTemp(1).AsRegister<Register>(),
6249 value);
6250 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006251 }
Serban Constantinescufca16662016-07-14 09:21:59 +01006252 codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006253 CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
6254 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006255 if (value_location.IsConstant()) {
6256 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
6257 __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
6258 } else if (!Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006259 Register src;
6260 if (type == Primitive::kPrimLong) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006261 src = value_location.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006262 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006263 src = value_location.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006264 }
Alexey Frunzec061de12017-02-14 13:27:23 -08006265 if (kPoisonHeapReferences && needs_write_barrier) {
6266 // Note that in the case where `value` is a null reference,
6267 // we do not enter this block, as a null reference does not
6268 // need poisoning.
6269 DCHECK_EQ(type, Primitive::kPrimNot);
6270 __ PoisonHeapReference(TMP, src);
6271 __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
6272 } else {
6273 __ StoreToOffset(store_type, src, obj, offset, null_checker);
6274 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006275 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006276 FRegister src = value_location.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006277 if (type == Primitive::kPrimFloat) {
Alexey Frunze2923db72016-08-20 01:55:47 -07006278 __ StoreSToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006279 } else {
Alexey Frunze2923db72016-08-20 01:55:47 -07006280 __ StoreDToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006281 }
6282 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006283 }
6284
  // Reference store: mark the GC card for `obj` so the GC scans it.
Alexey Frunzec061de12017-02-14 13:27:23 -08006285 if (needs_write_barrier) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006286 Register src = value_location.AsRegister<Register>();
Goran Jakovljevice114da22016-12-26 14:21:43 +01006287 codegen_->MarkGCCard(obj, src, value_can_be_null);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006288 }
6289
  // Full barrier after a volatile store.
6290 if (is_volatile) {
6291 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
6292 }
6293}
6294
// Register-allocation phase: delegates to the shared field-get handler.
6295void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6296 HandleFieldGet(instruction, instruction->GetFieldInfo());
6297}
6298
// Code-generation phase: delegates to the shared field-get handler.
6299void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6300 HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
6301}
6302
// Register-allocation phase: delegates to the shared field-set handler.
6303void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
6304 HandleFieldSet(instruction, instruction->GetFieldInfo());
6305}
6306
// Code-generation phase: delegates to the shared field-set handler,
// forwarding the dex pc (for runtime calls) and value nullability
// (to skip the null check in the card-marking path).
6307void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01006308 HandleFieldSet(instruction,
6309 instruction->GetFieldInfo(),
6310 instruction->GetDexPc(),
6311 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006312}
6313
// Loads the heap reference at `out + offset` back into `out` (in-place),
// applying the configured read-barrier strategy:
// - Baker read barrier: fast-path load via GenerateFieldLoadWithBakerReadBarrier.
// - Non-Baker read barrier: plain load, then slow-path barrier; the original
//   `out` value must first be preserved in `maybe_temp`.
// - No read barrier: plain load plus heap-reference unpoisoning.
Alexey Frunze15958152017-02-09 19:08:30 -08006314void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
6315 HInstruction* instruction,
6316 Location out,
6317 uint32_t offset,
6318 Location maybe_temp,
6319 ReadBarrierOption read_barrier_option) {
6320 Register out_reg = out.AsRegister<Register>();
6321 if (read_barrier_option == kWithReadBarrier) {
6322 CHECK(kEmitCompilerReadBarrier);
6323 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6324 if (kUseBakerReadBarrier) {
6325 // Load with fast path based Baker's read barrier.
6326 // /* HeapReference<Object> */ out = *(out + offset)
6327 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6328 out,
6329 out_reg,
6330 offset,
6331 maybe_temp,
6332 /* needs_null_check */ false);
6333 } else {
6334 // Load with slow path based read barrier.
6335 // Save the value of `out` into `maybe_temp` before overwriting it
6336 // in the following move operation, as we will need it for the
6337 // read barrier below.
6338 __ Move(maybe_temp.AsRegister<Register>(), out_reg);
6339 // /* HeapReference<Object> */ out = *(out + offset)
6340 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6341 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6342 }
6343 } else {
6344 // Plain load with no read barrier.
6345 // /* HeapReference<Object> */ out = *(out + offset)
6346 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6347 __ MaybeUnpoisonHeapReference(out_reg);
6348 }
6349}
6350
// Loads the heap reference at `obj + offset` into `out` (distinct registers),
// applying the configured read-barrier strategy. Unlike the one-register
// variant, `obj` is preserved, so no temp save is needed before the
// slow-path read barrier.
6351void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
6352 HInstruction* instruction,
6353 Location out,
6354 Location obj,
6355 uint32_t offset,
6356 Location maybe_temp,
6357 ReadBarrierOption read_barrier_option) {
6358 Register out_reg = out.AsRegister<Register>();
6359 Register obj_reg = obj.AsRegister<Register>();
6360 if (read_barrier_option == kWithReadBarrier) {
6361 CHECK(kEmitCompilerReadBarrier);
6362 if (kUseBakerReadBarrier) {
6363 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6364 // Load with fast path based Baker's read barrier.
6365 // /* HeapReference<Object> */ out = *(obj + offset)
6366 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6367 out,
6368 obj_reg,
6369 offset,
6370 maybe_temp,
6371 /* needs_null_check */ false);
6372 } else {
6373 // Load with slow path based read barrier.
6374 // /* HeapReference<Object> */ out = *(obj + offset)
6375 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6376 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6377 }
6378 } else {
6379 // Plain load with no read barrier.
6380 // /* HeapReference<Object> */ out = *(obj + offset)
6381 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6382 __ MaybeUnpoisonHeapReference(out_reg);
6383 }
6384}
6385
// Loads a GC root from `obj + offset` into `root`, with the requested
// read-barrier treatment. For Baker read barriers the mark entrypoint is
// loaded from the thread and branched to only when non-null (i.e. when the
// GC is marking); for other read barriers the root is re-read through a
// slow path. GC roots are never heap-poisoned, so no unpoisoning is done.
6386void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
6387 Location root,
6388 Register obj,
6389 uint32_t offset,
6390 ReadBarrierOption read_barrier_option) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07006391 Register root_reg = root.AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08006392 if (read_barrier_option == kWithReadBarrier) {
6393 DCHECK(kEmitCompilerReadBarrier);
6394 if (kUseBakerReadBarrier) {
6395 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6396 // Baker's read barrier are used:
6397 //
6398 // root = obj.field;
6399 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6400 // if (temp != null) {
6401 // root = temp(root)
6402 // }
6403
6404 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6405 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6406 static_assert(
6407 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6408 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6409 "have different sizes.");
6410 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6411 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6412 "have different sizes.");
6413
6414 // Slow path marking the GC root `root`.
      // T9 holds the entrypoint; it doubles as the branch condition below.
6415 Location temp = Location::RegisterLocation(T9);
6416 SlowPathCodeMIPS* slow_path =
6417 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
6418 instruction,
6419 root,
6420 /*entrypoint*/ temp);
6421 codegen_->AddSlowPath(slow_path);
6422
6423 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6424 const int32_t entry_point_offset =
6425 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
6426 // Loading the entrypoint does not require a load acquire since it is only changed when
6427 // threads are suspended or running a checkpoint.
6428 __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
6429 // The entrypoint is null when the GC is not marking, this prevents one load compared to
6430 // checking GetIsGcMarking.
6431 __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
6432 __ Bind(slow_path->GetExitLabel());
6433 } else {
6434 // GC root loaded through a slow path for read barriers other
6435 // than Baker's.
6436 // /* GcRoot<mirror::Object>* */ root = obj + offset
6437 __ Addiu32(root_reg, obj, offset);
6438 // /* mirror::Object* */ root = root->Read()
6439 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6440 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07006441 } else {
6442 // Plain GC root load with no read barrier.
6443 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6444 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6445 // Note that GC roots are not affected by heap poisoning, thus we
6446 // do not have to unpoison `root_reg` here.
6447 }
6448}
6449
// Baker read-barrier field load: thin wrapper over the generic reference
// load with no index and TIMES_1 scaling (plain `obj + offset` address).
Alexey Frunze15958152017-02-09 19:08:30 -08006450void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6451 Location ref,
6452 Register obj,
6453 uint32_t offset,
6454 Location temp,
6455 bool needs_null_check) {
6456 DCHECK(kEmitCompilerReadBarrier);
6457 DCHECK(kUseBakerReadBarrier);
6458
6459 // /* HeapReference<Object> */ ref = *(obj + offset)
6460 Location no_index = Location::NoLocation();
6461 ScaleFactor no_scale_factor = TIMES_1;
6462 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6463 ref,
6464 obj,
6465 offset,
6466 no_index,
6467 no_scale_factor,
6468 temp,
6469 needs_null_check);
6470}
6471
// Baker read-barrier array-element load: wrapper over the generic reference
// load using TIMES_4 scaling (32-bit compressed heap references).
6472void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6473 Location ref,
6474 Register obj,
6475 uint32_t data_offset,
6476 Location index,
6477 Location temp,
6478 bool needs_null_check) {
6479 DCHECK(kEmitCompilerReadBarrier);
6480 DCHECK(kUseBakerReadBarrier);
6481
6482 static_assert(
6483 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6484 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
6485 // /* HeapReference<Object> */ ref =
6486 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6487 ScaleFactor scale_factor = TIMES_4;
6488 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6489 ref,
6490 obj,
6491 data_offset,
6492 index,
6493 scale_factor,
6494 temp,
6495 needs_null_check);
6496}
6497
// Core Baker read-barrier reference load. Reads obj->monitor_ first (the
// required load-load ordering is enforced with `sync 0`), then the reference
// itself, then branches to a marking slow path when the lock word's
// read-barrier state bit indicates gray. With `always_update_field`, the
// slow path also writes the possibly-moved reference back to the field
// (used by the UnsafeCASObject-style intrinsics; `offset` must be 0 and the
// address is `obj + index`).
6498void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6499 Location ref,
6500 Register obj,
6501 uint32_t offset,
6502 Location index,
6503 ScaleFactor scale_factor,
6504 Location temp,
6505 bool needs_null_check,
6506 bool always_update_field) {
6507 DCHECK(kEmitCompilerReadBarrier);
6508 DCHECK(kUseBakerReadBarrier);
6509
6510 // In slow path based read barriers, the read barrier call is
6511 // inserted after the original load. However, in fast path based
6512 // Baker's read barriers, we need to perform the load of
6513 // mirror::Object::monitor_ *before* the original reference load.
6514 // This load-load ordering is required by the read barrier.
6515 // The fast path/slow path (for Baker's algorithm) should look like:
6516 //
6517 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6518 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6519 // HeapReference<Object> ref = *src; // Original reference load.
6520 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6521 // if (is_gray) {
6522 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6523 // }
6524 //
6525 // Note: the original implementation in ReadBarrier::Barrier is
6526 // slightly more complex as it performs additional checks that we do
6527 // not do here for performance reasons.
6528
6529 Register ref_reg = ref.AsRegister<Register>();
6530 Register temp_reg = temp.AsRegister<Register>();
6531 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6532
6533 // /* int32_t */ monitor = obj->monitor_
6534 __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
6535 if (needs_null_check) {
6536 MaybeRecordImplicitNullCheck(instruction);
6537 }
6538 // /* LockWord */ lock_word = LockWord(monitor)
6539 static_assert(sizeof(LockWord) == sizeof(int32_t),
6540 "art::LockWord and int32_t have different sizes.");
6541
6542 __ Sync(0); // Barrier to prevent load-load reordering.
6543
6544 // The actual reference load.
6545 if (index.IsValid()) {
6546 // Load types involving an "index": ArrayGet,
6547 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6548 // intrinsics.
6549 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
6550 if (index.IsConstant()) {
6551 size_t computed_offset =
6552 (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
6553 __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
6554 } else {
6555 // Handle the special case of the
6556 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6557 // intrinsics, which use a register pair as index ("long
6558 // offset"), of which only the low part contains data.
6559 Register index_reg = index.IsRegisterPair()
6560 ? index.AsRegisterPairLow<Register>()
6561 : index.AsRegister<Register>();
Chris Larsencd0295d2017-03-31 15:26:54 -07006562 __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08006563 __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
6564 }
6565 } else {
6566 // /* HeapReference<Object> */ ref = *(obj + offset)
6567 __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
6568 }
6569
6570 // Object* ref = ref_addr->AsMirrorPtr()
6571 __ MaybeUnpoisonHeapReference(ref_reg);
6572
6573 // Slow path marking the object `ref` when it is gray.
6574 SlowPathCodeMIPS* slow_path;
6575 if (always_update_field) {
6576 // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
6577 // of the form `obj + field_offset`, where `obj` is a register and
6578 // `field_offset` is a register pair (of which only the lower half
6579 // is used). Thus `offset` and `scale_factor` above are expected
6580 // to be null in this code path.
6581 DCHECK_EQ(offset, 0u);
6582 DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
6583 slow_path = new (GetGraph()->GetArena())
6584 ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
6585 ref,
6586 obj,
6587 /* field_offset */ index,
6588 temp_reg);
6589 } else {
6590 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
6591 }
6592 AddSlowPath(slow_path);
6593
6594 // if (rb_state == ReadBarrier::GrayState())
6595 // ref = ReadBarrier::Mark(ref);
6596 // Given the numeric representation, it's enough to check the low bit of the
6597 // rb_state. We do that by shifting the bit into the sign bit (31) and
6598 // performing a branch on less than zero.
6599 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
6600 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
6601 static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
6602 __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
6603 __ Bltz(temp_reg, slow_path->GetEntryLabel());
6604 __ Bind(slow_path->GetExitLabel());
6605}
6606
// Emits an unconditional slow-path read barrier after a reference load:
// always branches to ReadBarrierForHeapReferenceSlowPathMIPS (used for
// non-Baker read barriers).
6607void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
6608 Location out,
6609 Location ref,
6610 Location obj,
6611 uint32_t offset,
6612 Location index) {
6613 DCHECK(kEmitCompilerReadBarrier);
6614
6615 // Insert a slow path based read barrier *after* the reference load.
6616 //
6617 // If heap poisoning is enabled, the unpoisoning of the loaded
6618 // reference will be carried out by the runtime within the slow
6619 // path.
6620 //
6621 // Note that `ref` currently does not get unpoisoned (when heap
6622 // poisoning is enabled), which is alright as the `ref` argument is
6623 // not used by the artReadBarrierSlow entry point.
6624 //
6625 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6626 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
6627 ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
6628 AddSlowPath(slow_path);
6629
6630 __ B(slow_path->GetEntryLabel());
6631 __ Bind(slow_path->GetExitLabel());
6632}
6633
// Emits a slow-path read barrier only if read barriers are compiled in
// (non-Baker flavor); otherwise just unpoisons the loaded reference when
// heap poisoning is enabled.
6634void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6635 Location out,
6636 Location ref,
6637 Location obj,
6638 uint32_t offset,
6639 Location index) {
6640 if (kEmitCompilerReadBarrier) {
6641 // Baker's read barriers shall be handled by the fast path
6642 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
6643 DCHECK(!kUseBakerReadBarrier);
6644 // If heap poisoning is enabled, unpoisoning will be taken care of
6645 // by the runtime within the slow path.
6646 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
6647 } else if (kPoisonHeapReferences) {
6648 __ UnpoisonHeapReference(out.AsRegister<Register>());
6649 }
6650}
6651
// Emits an unconditional slow-path read barrier after a GC root load.
// Roots are not heap-poisoned, so no unpoisoning is involved.
6652void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6653 Location out,
6654 Location root) {
6655 DCHECK(kEmitCompilerReadBarrier);
6656
6657 // Insert a slow path based read barrier *after* the GC root load.
6658 //
6659 // Note that GC roots are not affected by heap poisoning, so we do
6660 // not need to do anything special for this here.
6661 SlowPathCodeMIPS* slow_path =
6662 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
6663 AddSlowPath(slow_path);
6664
6665 __ B(slow_path->GetEntryLabel());
6666 __ Bind(slow_path->GetExitLabel());
6667}
6668
// Register allocation for HInstanceOf. Hierarchy-walking checks become
// slow-path calls only when read barriers are enabled; array/unresolved/
// interface checks always use a slow path. Baker slow paths preserve all
// caller-save registers themselves, hence the empty custom caller-save set.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006669void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006670 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
6671 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07006672 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006673 switch (type_check_kind) {
6674 case TypeCheckKind::kExactCheck:
6675 case TypeCheckKind::kAbstractClassCheck:
6676 case TypeCheckKind::kClassHierarchyCheck:
6677 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08006678 call_kind =
6679 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006680 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006681 break;
6682 case TypeCheckKind::kArrayCheck:
6683 case TypeCheckKind::kUnresolvedCheck:
6684 case TypeCheckKind::kInterfaceCheck:
6685 call_kind = LocationSummary::kCallOnSlowPath;
6686 break;
6687 }
6688
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006689 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07006690 if (baker_read_barrier_slow_path) {
6691 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
6692 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006693 locations->SetInAt(0, Location::RequiresRegister());
6694 locations->SetInAt(1, Location::RequiresRegister());
6695 // The output does overlap inputs.
6696 // Note that TypeCheckSlowPathMIPS uses this register too.
6697 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08006698 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006699}
6700
// Code generation for HInstanceOf. Produces 1 in `out` on a match, 0
// otherwise. Fast paths inline the class/hierarchy comparisons (with
// read-barrier-aware class loads); kArrayCheck retries via a non-fatal
// slow path on mismatch; unresolved/interface checks always go to the
// slow path. A null `obj` short-circuits to 0 unless nullness is ruled out.
6701void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006702 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006703 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08006704 Location obj_loc = locations->InAt(0);
6705 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006706 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08006707 Location out_loc = locations->Out();
6708 Register out = out_loc.AsRegister<Register>();
6709 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6710 DCHECK_LE(num_temps, 1u);
6711 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006712 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6713 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6714 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6715 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006716 MipsLabel done;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006717 SlowPathCodeMIPS* slow_path = nullptr;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006718
6719 // Return 0 if `obj` is null.
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006720 // Avoid this check if we know `obj` is not null.
6721 if (instruction->MustDoNullCheck()) {
6722 __ Move(out, ZERO);
6723 __ Beqz(obj, &done);
6724 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006725
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006726 switch (type_check_kind) {
6727 case TypeCheckKind::kExactCheck: {
6728 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006729 GenerateReferenceLoadTwoRegisters(instruction,
6730 out_loc,
6731 obj_loc,
6732 class_offset,
6733 maybe_temp_loc,
6734 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006735 // Classes must be equal for the instanceof to succeed.
      // XOR yields 0 on equality; Sltiu(out, out, 1) converts 0 -> 1, non-0 -> 0.
6736 __ Xor(out, out, cls);
6737 __ Sltiu(out, out, 1);
6738 break;
6739 }
6740
6741 case TypeCheckKind::kAbstractClassCheck: {
6742 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006743 GenerateReferenceLoadTwoRegisters(instruction,
6744 out_loc,
6745 obj_loc,
6746 class_offset,
6747 maybe_temp_loc,
6748 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006749 // If the class is abstract, we eagerly fetch the super class of the
6750 // object to avoid doing a comparison we know will fail.
6751 MipsLabel loop;
6752 __ Bind(&loop);
6753 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08006754 GenerateReferenceLoadOneRegister(instruction,
6755 out_loc,
6756 super_offset,
6757 maybe_temp_loc,
6758 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006759 // If `out` is null, we use it for the result, and jump to `done`.
6760 __ Beqz(out, &done);
6761 __ Bne(out, cls, &loop);
6762 __ LoadConst32(out, 1);
6763 break;
6764 }
6765
6766 case TypeCheckKind::kClassHierarchyCheck: {
6767 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006768 GenerateReferenceLoadTwoRegisters(instruction,
6769 out_loc,
6770 obj_loc,
6771 class_offset,
6772 maybe_temp_loc,
6773 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006774 // Walk over the class hierarchy to find a match.
6775 MipsLabel loop, success;
6776 __ Bind(&loop);
6777 __ Beq(out, cls, &success);
6778 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08006779 GenerateReferenceLoadOneRegister(instruction,
6780 out_loc,
6781 super_offset,
6782 maybe_temp_loc,
6783 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006784 __ Bnez(out, &loop);
6785 // If `out` is null, we use it for the result, and jump to `done`.
6786 __ B(&done);
6787 __ Bind(&success);
6788 __ LoadConst32(out, 1);
6789 break;
6790 }
6791
6792 case TypeCheckKind::kArrayObjectCheck: {
6793 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006794 GenerateReferenceLoadTwoRegisters(instruction,
6795 out_loc,
6796 obj_loc,
6797 class_offset,
6798 maybe_temp_loc,
6799 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006800 // Do an exact check.
6801 MipsLabel success;
6802 __ Beq(out, cls, &success);
6803 // Otherwise, we need to check that the object's class is a non-primitive array.
6804 // /* HeapReference<Class> */ out = out->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08006805 GenerateReferenceLoadOneRegister(instruction,
6806 out_loc,
6807 component_offset,
6808 maybe_temp_loc,
6809 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006810 // If `out` is null, we use it for the result, and jump to `done`.
6811 __ Beqz(out, &done);
6812 __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
6813 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
6814 __ Sltiu(out, out, 1);
6815 __ B(&done);
6816 __ Bind(&success);
6817 __ LoadConst32(out, 1);
6818 break;
6819 }
6820
6821 case TypeCheckKind::kArrayCheck: {
6822 // No read barrier since the slow path will retry upon failure.
6823 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006824 GenerateReferenceLoadTwoRegisters(instruction,
6825 out_loc,
6826 obj_loc,
6827 class_offset,
6828 maybe_temp_loc,
6829 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006830 DCHECK(locations->OnlyCallsOnSlowPath());
6831 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
6832 /* is_fatal */ false);
6833 codegen_->AddSlowPath(slow_path);
6834 __ Bne(out, cls, slow_path->GetEntryLabel());
6835 __ LoadConst32(out, 1);
6836 break;
6837 }
6838
6839 case TypeCheckKind::kUnresolvedCheck:
6840 case TypeCheckKind::kInterfaceCheck: {
6841 // Note that we indeed only call on slow path, but we always go
6842 // into the slow path for the unresolved and interface check
6843 // cases.
6844 //
6845 // We cannot directly call the InstanceofNonTrivial runtime
6846 // entry point without resorting to a type checking slow path
6847 // here (i.e. by calling InvokeRuntime directly), as it would
6848 // require to assign fixed registers for the inputs of this
6849 // HInstanceOf instruction (following the runtime calling
6850 // convention), which might be cluttered by the potential first
6851 // read barrier emission at the beginning of this method.
6852 //
6853 // TODO: Introduce a new runtime entry point taking the object
6854 // to test (instead of its class) as argument, and let it deal
6855 // with the read barrier issues. This will let us refactor this
6856 // case of the `switch` code as it was previously (with a direct
6857 // call to the runtime not using a type checking slow path).
6858 // This should also be beneficial for the other cases above.
6859 DCHECK(locations->OnlyCallsOnSlowPath());
6860 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
6861 /* is_fatal */ false);
6862 codegen_->AddSlowPath(slow_path);
6863 __ B(slow_path->GetEntryLabel());
6864 break;
6865 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006866 }
6867
6868 __ Bind(&done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006869
6870 if (slow_path != nullptr) {
6871 __ Bind(slow_path->GetExitLabel());
6872 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006873}
6874
// Int constants produce no code; the constant is materialized at use site.
6875void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
6876 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6877 locations->SetOut(Location::ConstantLocation(constant));
6878}
6879
// Intentionally empty: see the locations builder above.
6880void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
6881 // Will be generated at use site.
6882}
6883
// Null constants produce no code; the constant is materialized at use site.
6884void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
6885 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6886 locations->SetOut(Location::ConstantLocation(constant));
6887}
6888
// Intentionally empty: see the locations builder above.
6889void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
6890 // Will be generated at use site.
6891}
6892
// Sets up the common calling-convention locations shared by all invoke kinds.
6893void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
6894 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
6895 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
6896}
6897
6898void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
6899 HandleInvoke(invoke);
Alexey Frunze1b8464d2016-11-12 17:22:05 -08006900 // The register T7 is required to be used for the hidden argument in
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006901 // art_quick_imt_conflict_trampoline, so add the hidden argument.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08006902 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006903}
6904
// Emits an interface dispatch: loads the receiver's class, indexes the IMT,
// and calls the resolved entry point indirectly through T9. The hidden
// argument (the dex method index, in temp 1 — T7 per the locations builder)
// lets the conflict trampoline disambiguate IMT collisions.
void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver spilled: reload it from the stack before reading its class.
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above is the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = klass->imt_ (pointer to the interface method table).
  __ LoadFromOffset(kLoadWord, temp, temp,
      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMipsPointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
6946
6947void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07006948 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
6949 if (intrinsic.TryDispatch(invoke)) {
6950 return;
6951 }
6952
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006953 HandleInvoke(invoke);
6954}
6955
// Builds the location summary for a static/direct invoke. On pre-R6, a
// PC-relative method load needs an extra base-register input (the special
// input); R6 does not (`has_extra_input` is false there).
void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
  bool has_extra_input = invoke->HasPcRelativeMethodLoadKind() && !is_r6;

  IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    // Intrinsic with a (slow-path) call still needs the special input, but it
    // may live anywhere — the slow path can reload it from the stack.
    if (invoke->GetLocations()->CanCall() && has_extra_input) {
      invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
    }
    return;
  }

  HandleInvoke(invoke);

  // Add the extra input register if either the dex cache array base register
  // or the PC-relative base register for accessing literals is needed.
  if (has_extra_input) {
    invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
  }
}
6980
// Signature-polymorphic invokes use the common invoke location setup; no
// MIPS-specific inputs or temps are added here.
void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
6984
// Code generation for signature-polymorphic invokes is shared across
// architectures; delegate to the common codegen helper.
void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
6988
Chris Larsen701566a2015-10-27 15:29:13 -07006989static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006990 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07006991 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
6992 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006993 return true;
6994 }
6995 return false;
6996}
6997
// Filters the load-string strategy chosen by the front end down to what this
// target/configuration can actually support, possibly falling back to the
// generic kDexCacheViaMethod kind.
HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
  // is incompatible with it.
  // TODO: Create as many MipsDexCacheArraysBase instructions as needed for methods
  // with irreducible loops.
  bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  bool fallback_load = has_irreducible_loops && !is_r6;
  switch (desired_string_load_kind) {
    // PC-relative kinds: AOT-only; keep fallback_load so pre-R6 with
    // irreducible loops degrades to the runtime-call kind.
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      // JIT roots are absolute addresses; no PC-relative base is involved.
      DCHECK(Runtime::Current()->UseJitCompilation());
      fallback_load = false;
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      // Already the fallback kind; nothing to degrade to.
      fallback_load = false;
      break;
  }
  if (fallback_load) {
    desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
  }
  return desired_string_load_kind;
}
7027
// Filters the load-class strategy chosen by the front end down to what this
// target/configuration supports, mirroring GetSupportedLoadStringKind:
// PC-relative kinds degrade to kDexCacheViaMethod on pre-R6 when the graph
// has irreducible loops.
HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
  // is incompatible with it.
  bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  bool fallback_load = has_irreducible_loops && !is_r6;
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      // Loaded from the current method; no PC-relative base needed.
      fallback_load = false;
      break;
    // PC-relative kinds: AOT-only; fallback_load stays as computed above.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
      break;
    case HLoadClass::LoadKind::kJitTableAddress:
      // JIT roots are absolute addresses; no PC-relative base is involved.
      DCHECK(Runtime::Current()->UseJitCompilation());
      fallback_load = false;
      break;
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      // Already the fallback kind; nothing to degrade to.
      fallback_load = false;
      break;
  }
  if (fallback_load) {
    desired_class_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
  }
  return desired_class_load_kind;
}
7061
// Returns the register holding the invoke's special (extra) input — the
// PC-relative base needed on pre-R6 — reloading it into `temp` when it
// currently lives on the stack or was saved by an intrinsic slow path.
// Only valid on pre-R6 (R6 needs no extra input).
Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
                                                                  Register temp) {
  CHECK(!GetInstructionSetFeatures().IsR6());
  CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
  Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
  if (!invoke->GetLocations()->Intrinsified()) {
    return location.AsRegister<Register>();
  }
  // For intrinsics we allow any location, so it may be on the stack.
  if (!location.IsRegister()) {
    __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
    return temp;
  }
  // For register locations, check if the register was saved. If so, get it from the stack.
  // Note: There is a chance that the register was saved but not overwritten, so we could
  // save one load. However, since this is just an intrinsic slow path we prefer this
  // simple and more robust approach rather that trying to determine if that's the case.
  SlowPathCode* slow_path = GetCurrentSlowPath();
  DCHECK(slow_path != nullptr);  // For intrinsified invokes the call is emitted on the slow path.
  if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
    int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
    __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
    return temp;
  }
  return location.AsRegister<Register>();
}
7088
// Filters the desired static/direct dispatch down to what this target supports:
// PC-relative method-load kinds fall back to kDexCacheViaMethod on pre-R6 when
// the graph contains an irreducible loop.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
  // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
  // is incompatible with it.
  bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  bool fallback_load = has_irreducible_loops && !is_r6;
  switch (dispatch_info.method_load_kind) {
    // Only the PC-relative kinds are affected by the pre-R6 restriction.
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      break;
    default:
      fallback_load = false;
      break;
  }
  if (fallback_load) {
    dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
    // method_load_data is meaningless for kDexCacheViaMethod; clear it.
    dispatch_info.method_load_data = 0;
  }
  return dispatch_info;
}
7112
// Emits the full static/direct call sequence: first materializes the callee
// ArtMethod* (or entry point) per the invoke's MethodLoadKind, then performs
// the call per its CodePtrLocation. `temp` receives the callee method for all
// kinds except kRecursive. PC-relative sequences are emitted with assembler
// reordering disabled so the linker patch placeholders (0x5678 etc.) keep
// their exact instruction positions.
void CodeGeneratorMIPS::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  // Pre-R6 PC-relative loads need the extra base-register input; R6 uses ZERO.
  Register base_reg = (invoke->HasPcRelativeMethodLoadKind() && !is_r6)
      ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
      : ZERO;

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadWord,
                        temp.AsRegister<Register>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Self-call: the current method is already an input of the invoke.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      PcRelativePatchInfo* info = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      bool reordering = __ SetReorder(false);
      Register temp_reg = temp.AsRegister<Register>();
      // high half via placeholder, low half via addiu placeholder (patched at link time).
      EmitPcRelativeAddressPlaceholderHigh(info, temp_reg, base_reg);
      __ Addiu(temp_reg, temp_reg, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      if (is_r6) {
        // R6: PC-relative high half into TMP, then load through the placeholder offset.
        uint32_t offset = invoke->GetDexCacheArrayOffset();
        CodeGeneratorMIPS::PcRelativePatchInfo* info =
            NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset);
        bool reordering = __ SetReorder(false);
        EmitPcRelativeAddressPlaceholderHigh(info, TMP, ZERO);
        __ Lw(temp.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
        __ SetReorder(reordering);
      } else {
        // Pre-R6: address the dex cache array relative to the materialized base.
        HMipsDexCacheArraysBase* base =
            invoke->InputAt(invoke->GetSpecialInputIndex())->AsMipsDexCacheArraysBase();
        int32_t offset =
            invoke->GetDexCacheArrayOffset() - base->GetElementOffset() - kDexCacheArrayLwOffset;
        __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), base_reg, offset);
      }
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      Register reg = temp.AsRegister<Register>();
      Register method_reg;
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<Register>();
      } else {
        // TODO: use the appropriate DCHECK() here if possible.
        // DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        // Current method lives in its canonical stack slot; reload it.
        __ Lw(reg, SP, kCurrentMethodStackOffset);
      }

      // temp = temp->dex_cache_resolved_methods_;
      __ LoadFromOffset(kLoadWord,
                        reg,
                        method_reg,
                        ArtMethod::DexCacheResolvedMethodsOffset(kMipsPointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ LoadFromOffset(kLoadWord,
                        reg,
                        reg,
                        CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Direct branch-and-link to our own frame entry.
      __ Bal(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadWord,
                        T9,
                        callee_method.AsRegister<Register>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMipsPointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ NopIfNoReordering();
      break;
  }
  DCHECK(!IsLeafMethod());
}
7215
7216void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007217 // Explicit clinit checks triggered by static invokes must have been pruned by
7218 // art::PrepareForRegisterAllocation.
7219 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007220
7221 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7222 return;
7223 }
7224
7225 LocationSummary* locations = invoke->GetLocations();
7226 codegen_->GenerateStaticOrDirectCall(invoke,
7227 locations->HasTemps()
7228 ? locations->GetTemp(0)
7229 : Location::NoLocation());
7230 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
7231}
7232
// Emits a virtual dispatch: loads the receiver's class, fetches the vtable
// entry at the invoke's vtable index, and calls its entry point indirectly
// through T9. The class load doubles as the implicit null check.
void CodeGeneratorMIPS::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  Register temp = temp_location.AsRegister<Register>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  // The load above faults on a null receiver; record it as the null check.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
}
7266
7267void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7268 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7269 return;
7270 }
7271
7272 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007273 DCHECK(!codegen_->IsLeafMethod());
7274 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
7275}
7276
// Builds the location summary for HLoadClass. The runtime-call kind delegates
// to the common helper; the other kinds pick inputs/temps depending on the
// load kind, read-barrier configuration, and whether the target is R6 (which
// needs no extra base register for PC-relative literals).
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      if (isR6) {
        break;
      }
      FALLTHROUGH_INTENDED;
    case HLoadClass::LoadKind::kReferrersClass:
      // Input 0: current method (kReferrersClass) or the PC-relative base (pre-R6).
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path on R2
      // (no benefit for R6).
      if (!isR6) {
        locations->AddTemp(Location::RequiresRegister());
      }
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
7328
// Emits the class load according to the kind selected at location-build time.
// PC-relative and JIT-root sequences run with assembler reordering disabled so
// the 0x1234/0x5678 placeholders stay at fixed instruction positions for
// link-time / JIT patching. A slow path is appended when a BSS entry may be
// null or a clinit check is required.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    case HLoadClass::LoadKind::kReferrersClass:
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  // Boot-image classes never move, so they never need a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              base_or_current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
      __ Addiu(out, out, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      if (isR6 || non_baker_read_barrier) {
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
        GenerateGcRootFieldLoad(cls, out_loc, out, /* placeholder */ 0x5678, read_barrier_option);
        __ SetReorder(reordering);
      } else {
        // On R2 save the BSS entry address in a temporary register instead of
        // recalculating it in the slow path.
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, temp, base_or_current_method_reg);
        __ Addiu(temp, temp, /* placeholder */ 0x5678);
        __ SetReorder(reordering);
        GenerateGcRootFieldLoad(cls, out_loc, temp, /* offset */ 0, read_barrier_option);
      }
      // The BSS entry may still be null (class not yet resolved).
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
                                                                             cls->GetTypeIndex(),
                                                                             cls->GetClass());
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      GenerateGcRootFieldLoad(cls, out_loc, out, /* placeholder */ 0x5678, read_barrier_option);
      __ SetReorder(reordering);
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
7451
// Returns the byte offset of the pending-exception slot inside the Thread
// object, read/written relative to the thread register (TR) below.
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
}
7455
void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
  // Reading the pending exception is a plain thread-local load: no runtime
  // call, just an output register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  // Load the pending exception object from the current thread (TR).
  __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
}
7466
void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
  // No inputs, outputs or calls; an empty location summary suffices.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Clear the pending exception by storing null (register ZERO) into the
  // thread-local exception slot.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
7474
// Chooses registers for HLoadString depending on the load kind, the MIPS
// revision (R2 needs an extra base register for PC-relative addressing) and
// the read-barrier configuration.
void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      if (isR6) {
        break;
      }
      FALLTHROUGH_INTENDED;
    // We need an extra register for PC-relative dex cache accesses.
    case HLoadString::LoadKind::kDexCacheViaMethod:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
    // Runtime call: the result arrives in the first calling-convention register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path on R2
        // (no benefit for R6).
        if (!isR6) {
          locations->AddTemp(Location::RequiresRegister());
        }
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
7519
// Emits the string load for each HLoadString::LoadKind. The 0x1234/0x5678
// immediates are placeholders patched later via the recorded PatchInfo;
// reordering is disabled around them so the patch targets stay adjacent.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      bool reordering = __ SetReorder(false);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
      __ Addiu(out, out, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      // The string object is pinned in the boot image; embed its address.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      if (isR6 || non_baker_read_barrier) {
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out, base_or_current_method_reg);
        GenerateGcRootFieldLoad(load,
                                out_loc,
                                out,
                                /* placeholder */ 0x5678,
                                kCompilerReadBarrierOption);
        __ SetReorder(reordering);
      } else {
        // On R2 save the BSS entry address in a temporary register instead of
        // recalculating it in the slow path.
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        bool reordering = __ SetReorder(false);
        codegen_->EmitPcRelativeAddressPlaceholderHigh(info, temp, base_or_current_method_reg);
        __ Addiu(temp, temp, /* placeholder */ 0x5678);
        __ SetReorder(reordering);
        GenerateGcRootFieldLoad(load, out_loc, temp, /* offset */ 0, kCompilerReadBarrierOption);
      }
      // A null BSS entry means the string is not resolved yet: resolve it in
      // the slow path.
      SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load);
      codegen_->AddSlowPath(slow_path);
      __ Beqz(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      CodeGeneratorMIPS::JitPatchInfo* info =
          codegen_->NewJitRootStringPatch(load->GetDexFile(),
                                          load->GetStringIndex(),
                                          load->GetString());
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption);
      __ SetReorder(reordering);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  DCHECK(load_kind == HLoadString::LoadKind::kDexCacheViaMethod);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
7619
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007620void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
7621 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7622 locations->SetOut(Location::ConstantLocation(constant));
7623}
7624
7625void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
7626 // Will be generated at use site.
7627}
7628
7629void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7630 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007631 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007632 InvokeRuntimeCallingConvention calling_convention;
7633 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7634}
7635
7636void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7637 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01007638 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007639 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7640 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007641 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007642 }
7643 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7644}
7645
// Multiplication needs no runtime call: core registers for int/long, FPU
// registers for float/double; the output may share an input register.
void LocationsBuilderMIPS::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
7668
// Emits multiplication. 32-bit and FP cases map to a single instruction
// (R2 vs. R6 mnemonics differ); the 64-bit case is synthesized from 32-bit
// multiplies on the register pairs, so instruction order matters.
void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();

      if (isR6) {
        __ MulR6(dst, lhs, rhs);
      } else {
        __ MulR2(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of the A1_A2 pair.
      // The algorithm is wrong if dst_high is either lhs_lo or rhs_lo:
      // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2), because dst_high is
      // written before lhs_low/rhs_low are last read.
      DCHECK_NE(dst_high, lhs_low);
      DCHECK_NE(dst_high, rhs_low);

      // A_B * C_D
      // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
      // dst_lo: [ low(B*D) ]
      // Note: R2 and R6 MUL produce the low 32 bit of the multiplication result.

      if (isR6) {
        __ MulR6(TMP, lhs_high, rhs_low);
        __ MulR6(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        __ MuhuR6(TMP, lhs_low, rhs_low);  // High half of the unsigned low product.
        __ Addu(dst_high, dst_high, TMP);
        __ MulR6(dst_low, lhs_low, rhs_low);
      } else {
        // R2 has no MUHU; go through the HI/LO accumulator registers instead.
        __ MulR2(TMP, lhs_high, rhs_low);
        __ MulR2(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        __ MultuR2(lhs_low, rhs_low);
        __ Mfhi(TMP);
        __ Addu(dst_high, dst_high, TMP);
        __ Mflo(dst_low);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ MulS(dst, lhs, rhs);
      } else {
        __ MulD(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected mul type " << type;
  }
}
7740
7741void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
7742 LocationSummary* locations =
7743 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
7744 switch (neg->GetResultType()) {
7745 case Primitive::kPrimInt:
7746 case Primitive::kPrimLong:
7747 locations->SetInAt(0, Location::RequiresRegister());
7748 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7749 break;
7750
7751 case Primitive::kPrimFloat:
7752 case Primitive::kPrimDouble:
7753 locations->SetInAt(0, Location::RequiresFpuRegister());
7754 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
7755 break;
7756
7757 default:
7758 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
7759 }
7760}
7761
// Emits negation as subtraction from zero; the 64-bit case also propagates
// the borrow from the low word into the high word.
void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Subu(dst, ZERO, src);
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Subu(dst_low, ZERO, src_low);
      // TMP = 1 iff the low-word negation borrowed (dst_low != 0).
      __ Sltu(TMP, ZERO, dst_low);
      __ Subu(dst_high, ZERO, src_high);
      __ Subu(dst_high, dst_high, TMP);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ NegS(dst, src);
      } else {
        __ NegD(dst, src);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected neg type " << type;
  }
}
7799
void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
  // Array allocation is a runtime call: class and length go in the first two
  // argument registers, the new array comes back in the return register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(kQuickAllocArrayResolved, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
}
7815
void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation is a runtime call. String allocation is special-cased:
  // it goes through StringFactory and needs the current-method register as a
  // temp rather than a class argument.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
7827
void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
    // Load the entrypoint's ArtMethod from the thread, then jump through its
    // quick-compiled code pointer (indirect call via T9, per the MIPS ABI).
    __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ NopIfNoReordering();  // Fill the branch delay slot when reordering is off.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
7845
7846void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
7847 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7848 locations->SetInAt(0, Location::RequiresRegister());
7849 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7850}
7851
// Emits bitwise NOT as NOR with ZERO (~x == ~(x | 0)); the long case applies
// it to both halves of the register pair.
void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Nor(dst, src, ZERO);
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Nor(dst_high, src_high, ZERO);
      __ Nor(dst_low, src_low, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}
7878
7879void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
7880 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7881 locations->SetInAt(0, Location::RequiresRegister());
7882 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7883}
7884
7885void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
7886 LocationSummary* locations = instruction->GetLocations();
7887 __ Xori(locations->Out().AsRegister<Register>(),
7888 locations->InAt(0).AsRegister<Register>(),
7889 1);
7890}
7891
void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
  // Throwing slow-path locations; only the checked object is needed as input.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

// Implicit null check: a dummy load from the object; a null pointer faults
// and the signal handler turns the fault into a NullPointerException.
void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    // A subsequent user access performs the check for free; emit nothing here.
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ Lw(ZERO, obj.AsRegister<Register>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}

// Explicit null check: branch to a throwing slow path when the object is null.
void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit or explicit variant based on compiler options.
  codegen_->GenerateNullCheck(instruction);
}
7919
7920void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
7921 HandleBinaryOp(instruction);
7922}
7923
7924void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
7925 HandleBinaryOp(instruction);
7926}
7927
void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are created by the register allocator after location
  // building, so this visitor must never run.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the move resolver, which orders the moves to handle cycles.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
7935
void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  // Stack-passed parameters live in the caller's frame: rebase the slot index
  // by this method's frame size.
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
                                                       ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
7951
void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
  // The current ArtMethod* is always in the dedicated method register (A0).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
                                                      ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
7962
void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
  // Phis are resolved by the register allocator; any location works for both
  // the inputs and the output.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis never survive to code generation; they are eliminated beforehand.
  LOG(FATAL) << "Unreachable";
}
7974
// Remainder: int is computed inline; long/float/double go through runtime
// calls, so their operands are pinned to the calling-convention registers.
void LocationsBuilderMIPS::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8011
// Emits the remainder: inline div/rem for int, runtime entrypoints
// (lmod / fmodf / fmod) for the wider types.
void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
      break;
    }
    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      break;
    }
    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmod, double, double, double>();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8038
void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // No operands and no registers needed.
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is a store-store barrier: final-field writes must be
  // visible before the reference is published.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
8047
void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // No operands and no registers needed.
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier flavor requested by the instruction.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
8055
void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
  // Pin the returned value to the ABI return location for its type.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, MipsReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}
8065
void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
  // Nothing to pass back; no locations required.
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
8073
// Rotate-right shares the common shift/rotate location setup.
void LocationsBuilderMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}
8077
void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}
8081
// Left shift: delegated to the shared shift handler.
void LocationsBuilderMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}
8085
void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}
8089
// Arithmetic right shift: delegated to the shared shift handler.
void LocationsBuilderMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8093
void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8097
// Subtraction: delegated to the shared binary-op handler.
void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
8101
void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
8105
// Static field reads share the instance-field-get machinery.
void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
8109
void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
8113
void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
8117
void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() lets the handler skip the GC write-barrier null check
  // when the stored reference is statically known to be non-null.
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
8124
// Unresolved field accesses always go through a runtime call; locations follow
// the field-access calling convention rather than normal register allocation.
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8132
void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  // Dispatches to the appropriate quick entrypoint based on field type/index.
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8142
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8150
void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8160
void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8168
void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8178
void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8186
void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8196
// A suspend check only calls the runtime on the slow path, and that entrypoint
// saves everything itself, so no caller-save registers need to be recorded.
void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}
8202
void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  // Not folded into a branch: emit an explicit check with no successor label.
  GenerateSuspendCheck(instruction, nullptr);
}
8216
// Throw always calls the runtime; the exception object goes in the first
// runtime-call argument register.
void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
8223
void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
8228
// Chooses locations for a primitive type conversion. Most conversions are
// inline code; pre-R6 long<->float/double conversions must call the runtime
// (kQuickL2f/L2d/F2l/D2l) and therefore use the runtime calling convention.
void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  // Only pre-R6 long<->FP conversions need a runtime call (R6 can use
  // cvt/trunc with FR=1 — see the codegen visitor).
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  if (!isR6 &&
      ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
       (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
    call_kind = LocationSummary::kCallOnMainOnly;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  if (call_kind == LocationSummary::kNoCall) {
    if (Primitive::IsFloatingPointType(input_type)) {
      locations->SetInAt(0, Location::RequiresFpuRegister());
    } else {
      locations->SetInAt(0, Location::RequiresRegister());
    }

    if (Primitive::IsFloatingPointType(result_type)) {
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
    } else {
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
    }
  } else {
    InvokeRuntimeCallingConvention calling_convention;

    if (Primitive::IsFloatingPointType(input_type)) {
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
    } else {
      // A long input occupies the first two argument registers as a pair.
      DCHECK_EQ(input_type, Primitive::kPrimLong);
      locations->SetInAt(0, Location::RegisterPairLocation(
                 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
    }

    locations->SetOut(calling_convention.GetReturnLocation(result_type));
  }
}
8275
// Emits code for a primitive type conversion. The cases are, in order:
// integral->long (sign extend), integral->integral (narrow/widen),
// integral->FP, FP->integral, and FP->FP. R2 vs. R6 differences (FR mode,
// NAN2008 truncation semantics, available instructions) are handled inline.
void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  DCHECK_NE(input_type, result_type);

  if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
    // int-like -> long: copy into the low register, sign-extend into the high one.
    Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
    Register dst_low = locations->Out().AsRegisterPairLow<Register>();
    Register src = locations->InAt(0).AsRegister<Register>();

    if (dst_low != src) {
      __ Move(dst_low, src);
    }
    __ Sra(dst_high, src, 31);
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    Register dst = locations->Out().AsRegister<Register>();
    // Narrowing from long uses only the low word of the pair.
    Register src = (input_type == Primitive::kPrimLong)
        ? locations->InAt(0).AsRegisterPairLow<Register>()
        : locations->InAt(0).AsRegister<Register>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: mask, don't sign-extend.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (has_sign_extension) {
          __ Seb(dst, src);
        } else {
          // R1 has no seb: emulate with shift-left/arithmetic-shift-right.
          __ Sll(dst, src, 24);
          __ Sra(dst, dst, 24);
        }
        break;
      case Primitive::kPrimShort:
        if (has_sign_extension) {
          __ Seh(dst, src);
        } else {
          __ Sll(dst, src, 16);
          __ Sra(dst, dst, 16);
        }
        break;
      case Primitive::kPrimInt:
        if (dst != src) {
          __ Move(dst, src);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    if (input_type == Primitive::kPrimLong) {
      if (isR6) {
        // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
        Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
        FRegister dst = locations->Out().AsFpuRegister<FRegister>();
        __ Mtc1(src_low, FTMP);
        __ Mthc1(src_high, FTMP);
        if (result_type == Primitive::kPrimFloat) {
          __ Cvtsl(dst, FTMP);
        } else {
          __ Cvtdl(dst, FTMP);
        }
      } else {
        // Pre-R6: long->FP goes through the runtime.
        QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
                                                                                : kQuickL2d;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (result_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
        } else {
          CheckEntrypointTypes<kQuickL2d, double, int64_t>();
        }
      }
    } else {
      // int -> float/double: move to FPU and convert.
      Register src = locations->InAt(0).AsRegister<Register>();
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);

    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**-63 for truncating to long, -2**-31 for truncating to int).
    // If the input is greater than or equal to the minimum, it procedes to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    if (result_type == Primitive::kPrimLong) {
      if (isR6) {
        // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
        Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
        Register dst_low = locations->Out().AsRegisterPairLow<Register>();

        if (input_type == Primitive::kPrimFloat) {
          __ TruncLS(FTMP, src);
        } else {
          __ TruncLD(FTMP, src);
        }
        __ Mfc1(dst_low, FTMP);
        __ Mfhc1(dst_high, FTMP);
      } else {
        // Pre-R6: FP->long goes through the runtime.
        QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
                                                                               : kQuickD2l;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (input_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
        } else {
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
        }
      }
    } else {
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      Register dst = locations->Out().AsRegister<Register>();
      MipsLabel truncate;
      MipsLabel done;

      if (!isR6) {
        // Pre-NAN2008: hand-code the NaN / out-of-range cases described above.
        // Load INT32_MIN (as float or double) into FTMP for the range compare.
        if (input_type == Primitive::kPrimFloat) {
          uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, min_val);
          __ Mtc1(TMP, FTMP);
        } else {
          uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, High32Bits(min_val));
          __ Mtc1(ZERO, FTMP);
          __ MoveToFpuHigh(TMP, FTMP);
        }

        // If min <= src, the plain truncate handles it; branch to it.
        if (input_type == Primitive::kPrimFloat) {
          __ ColeS(0, FTMP, src);
        } else {
          __ ColeD(0, FTMP, src);
        }
        __ Bc1t(0, &truncate);

        // src < min or NaN: compare src to itself; NaN -> 0, else -> INT32_MIN.
        if (input_type == Primitive::kPrimFloat) {
          __ CeqS(0, src, src);
        } else {
          __ CeqD(0, src, src);
        }
        __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
        __ Movf(dst, ZERO, 0);

        __ B(&done);

        __ Bind(&truncate);
      }

      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);

      if (!isR6) {
        __ Bind(&done);
      }
    }
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float <-> double.
    FRegister dst = locations->Out().AsFpuRegister<FRegister>();
    FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
8472
// Logical (unsigned) right shift: delegated to the shared shift handler.
void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
8476
void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
8480
// Bitwise xor: delegated to the shared binary-op handler.
void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
8484
void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
8488
void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
8493
void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
8498
// All comparison conditions below funnel into the shared HandleCondition.
void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
8502
void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
8506
void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
8510
void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
8514
void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
8518
void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
8522
void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
8526
void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
8530
void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
8534
void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
8538
void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
8542
void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
8546
// Below/Above variants are the unsigned comparisons.
void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
8550
void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
8554
void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
8558
void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
8562
void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
8566
void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
8570
void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
8574
void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
8578
// The switch value must live in a core register for the compare/jump sequence.
void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
8584
// Lowers a packed switch as a chain of compare-and-branch instructions.
// The value is biased by -lower_bound in TMP and then decremented as the
// chain walks case pairs, so each case needs only a compare against zero.
// Used when the entry count is below the jump-table threshold (or on R2
// without a constant-area base).
void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  Register temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle cases two at a time: one subtraction serves both a "<" and an "==" test.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ B(codegen_->GetLabelOf(default_block));
  }
}
8620
// Lowers a packed switch via a jump table of PC-relative offsets.
// `constant_area` is the base register for locating the table (ZERO on R6,
// where the assembler can address it PC-relatively; the method-address base
// register on R2).
void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
                                                             Register constant_area,
                                                             int32_t lower_bound,
                                                             uint32_t num_entries,
                                                             HBasicBlock* switch_block,
                                                             HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<MipsLabel*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  // Sltiu needs a 16-bit immediate; otherwise materialize the bound in AT.
  if (IsInt<16>(static_cast<int32_t>(num_entries))) {
    __ Sltiu(AT, TMP, num_entries);
    __ Beqz(AT, codegen_->GetLabelOf(default_block));
  } else {
    __ LoadConst32(AT, num_entries);
    __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
  }

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, constant_area, table->GetLabel());
  __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);  // TMP = AT + (index << 2).
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Addu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ NopIfNoReordering();
}
8657
// Picks the packed-switch lowering: a jump table on R6 for large switches,
// otherwise a compare/branch chain. (On R2, table-based switches arrive as
// HMipsPackedSwitch instead — see VisitMipsPackedSwitch.)
void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  Register value_reg = locations->InAt(0).AsRegister<Register>();
  HBasicBlock* switch_block = switch_instr->GetBlock();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  if (codegen_->GetInstructionSetFeatures().IsR6() &&
      num_entries > kPackedSwitchJumpTableThreshold) {
    // R6 uses PC-relative addressing to access the jump table.
    // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
    // the jump table and it is implemented by changing HPackedSwitch to
    // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
    // See VisitMipsPackedSwitch() for the table-based implementation on R2.
    GenTableBasedPackedSwitch(value_reg,
                              ZERO,
                              lower_bound,
                              num_entries,
                              switch_block,
                              default_block);
  } else {
    GenPackedSwitchWithCompares(value_reg,
                                lower_bound,
                                num_entries,
                                switch_block,
                                default_block);
  }
}
8687
void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // Constant area pointer (HMipsComputeBaseMethodAddress).
  locations->SetInAt(1, Location::RequiresRegister());
}
8695
void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  Register value_reg = locations->InAt(0).AsRegister<Register>();
  Register constant_area = locations->InAt(1).AsRegister<Register>();
  HBasicBlock* switch_block = switch_instr->GetBlock();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // This is an R2-only path. HPackedSwitch has been changed to
  // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
  // required to address the jump table relative to PC.
  GenTableBasedPackedSwitch(value_reg,
                            constant_area,
                            lower_bound,
                            num_entries,
                            switch_block,
                            default_block);
}
8715
// The computed PC/base value needs a core register as its output.
void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
8722
// Materializes the current PC in a register on pre-R6 cores (which have no
// PC-relative addressing) by abusing NAL, which stores the return address
// in RA without branching anywhere useful.
void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations = insn->GetLocations();
  Register reg = locations->Out().AsRegister<Register>();

  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());

  // Generate a dummy PC-relative call to obtain PC.
  __ Nal();
  // Grab the return address off RA.
  __ Move(reg, RA);
  // TODO: Can we share this code with that of VisitMipsDexCacheArraysBase()?

  // Remember this offset (the obtained PC value) for later use with constant area.
  __ BindPcRelBaseLabel();
}
8739
void LocationsBuilderMIPS::VisitMipsDexCacheArraysBase(HMipsDexCacheArraysBase* base) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(base);
  locations->SetOut(Location::RequiresRegister());
}
8744
// Computes the base address of the dex cache arrays region via a PC-relative
// patch. The hi/lo address halves are placeholders (0x5678 below) fixed up at
// link time; reordering is disabled so the patched instruction pair stays
// adjacent.
void InstructionCodeGeneratorMIPS::VisitMipsDexCacheArraysBase(HMipsDexCacheArraysBase* base) {
  Register reg = base->GetLocations()->Out().AsRegister<Register>();
  CodeGeneratorMIPS::PcRelativePatchInfo* info =
      codegen_->NewPcRelativeDexCacheArrayPatch(base->GetDexFile(), base->GetElementOffset());
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
  bool reordering = __ SetReorder(false);
  // TODO: Reuse MipsComputeBaseMethodAddress on R2 instead of passing ZERO to force emitting NAL.
  codegen_->EmitPcRelativeAddressPlaceholderHigh(info, reg, ZERO);
  __ Addiu(reg, reg, /* placeholder */ 0x5678);
  __ SetReorder(reordering);
}
8756
void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
8763
void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
8767
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008768void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
8769 LocationSummary* locations =
8770 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8771 locations->SetInAt(0, Location::RequiresRegister());
8772 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00008773}
8774
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008775void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
8776 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00008777 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008778 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008779 instruction->GetIndex(), kMipsPointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008780 __ LoadFromOffset(kLoadWord,
8781 locations->Out().AsRegister<Register>(),
8782 locations->InAt(0).AsRegister<Register>(),
8783 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008784 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008785 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00008786 instruction->GetIndex(), kMipsPointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008787 __ LoadFromOffset(kLoadWord,
8788 locations->Out().AsRegister<Register>(),
8789 locations->InAt(0).AsRegister<Register>(),
8790 mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008791 __ LoadFromOffset(kLoadWord,
8792 locations->Out().AsRegister<Register>(),
8793 locations->Out().AsRegister<Register>(),
8794 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008795 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00008796}
8797
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008798#undef __
8799#undef QUICK_ENTRY_POINT
8800
8801} // namespace mips
8802} // namespace art