/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_mips.h"

#include "arch/mips/entrypoints_direct_mips.h"
#include "arch/mips/instruction_set_features_mips.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_mips.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips/assembler_mips.h"
#include "utils/stack_checks.h"

namespace art {
namespace mips {

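// The ArtMethod* of the method being compiled arrives in A0 and is spilled at the very
// bottom of the frame (offset 0); these two constants encode exactly that.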
static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = A0;

Location MipsReturnLocation(Primitive::Type return_type) {
  switch (return_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::RegisterLocation(V0);

    case Primitive::kPrimLong:
      return Location::RegisterPairLocation(V0, V1);

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      return Location::FpuRegisterLocation(F0);

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
}

Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
  return MipsReturnLocation(type);
}

Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}

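// Assigns the location of the next argument under the managed calling convention used here:
// integral/reference arguments take the next free core register (or a stack slot once the
// registers run out), longs are aligned to an even/odd register pair, and floats/doubles go
// to even FPU registers (see the note inside the switch below). Illustrative sketch, assuming
// the core argument registers implied by the "Skip A1(A3)" comment are A1, A2, A3, T0, T1:
// a lone `long` skips A1 and is passed in the A2/A3 pair, while `(int, long)` yields A1 for
// the int and A2/A3 for the long; every argument still reserves its stack slot(s).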
Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
  Location next_location;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t gp_index = gp_index_++;
      if (gp_index < calling_convention.GetNumberOfRegisters()) {
        next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t gp_index = gp_index_;
      gp_index_ += 2;
      if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
        Register reg = calling_convention.GetRegisterAt(gp_index);
        if (reg == A1 || reg == A3) {
          gp_index_++;  // Skip A1(A3), and use A2_A3(T0_T1) instead.
          gp_index++;
        }
        Register low_even = calling_convention.GetRegisterAt(gp_index);
        Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
        DCHECK_EQ(low_even + 1, high_odd);
        next_location = Location::RegisterPairLocation(low_even, high_odd);
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::DoubleStackSlot(stack_offset);
      }
      break;
    }

    // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
    // will take up the even/odd pair, while floats are stored in even regs only.
    // On 64 bit FPU, both double and float are stored in even registers only.
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      uint32_t float_index = float_index_++;
      if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
        next_location = Location::FpuRegisterLocation(
            calling_convention.GetFpuRegisterAt(float_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                     : Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return MipsReturnLocation(type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()

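// Slow path throwing the out-of-bounds exception; it calls the ThrowStringBounds entrypoint
// for String.charAt bounds checks and the ThrowArrayBounds entrypoint otherwise.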
class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
};

class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
};

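// Slow path resolving (and, when requested, initializing) a class; for HLoadClass/kBssEntry
// it also writes the resolved class back to the corresponding .bss entry.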
class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  LoadClassSlowPathMIPS(HLoadClass* cls,
                        HInstruction* at,
                        uint32_t dex_pc,
                        bool do_clinit,
                        const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      bool reordering = __ SetReorder(false);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips_codegen->MoveLocation(out,
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
      __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
};

class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit LoadStringSlowPathMIPS(HLoadString* instruction,
                                  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    Register out = locations->Out().AsRegister<Register>();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, bss_info_high_);
      bool reordering = __ SetReorder(false);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }

    Primitive::Type type = instruction_->GetType();
    mips_codegen->MoveLocation(locations->Out(),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
};

class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
};

class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(mips_codegen->GetLabelOf(successor_));
    }
  }

  MipsLabel* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  MipsLabel return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
};

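// Slow path for instanceof and check-cast. When `is_fatal` is true the path is only entered
// on failure and always throws, so live registers are not saved/restored and no branch back
// to the fast path is emitted.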
class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
};

class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
      : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
};

class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
                              Location ref,
                              Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
      __ Jalr(entrypoint_.AsRegister<Register>());
      __ NopIfNoReordering();
    } else {
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this,
                                                        /* direct */ false);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
                                            Location ref,
                                            Register obj,
                                            Location field_offset,
                                            Register temp1)
      : SlowPathCodeMIPS(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegisterPair()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                      instruction_,
                                                      this,
                                                      /* direct */ false);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    MipsLabel done;
    __ Beq(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    Register base = obj_;
    // The UnsafeCASObject intrinsic uses a register pair as field
    // offset ("long offset"), of which only the low part contains
    // data.
    Register offset = field_offset_.AsRegisterPairLow<Register>();
    Register expected = temp1_;
    Register value = ref_reg;
    Register tmp_ptr = TMP;  // Pointer to actual memory.
    Register tmp = AT;       // Value in memory.

    __ Addu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

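    // Pre-R6 and R6 MIPS use different encodings for the LL/SC instructions, so pick the
    // matching assembler helpers (LlR2/ScR2 vs. LlR6/ScR6) based on the ISA features.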
    bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    MipsLabel loop_head, exit_loop;
    __ Bind(&loop_head);
    if (is_r6) {
      __ LlR6(tmp, tmp_ptr);
    } else {
      __ LlR2(tmp, tmp_ptr);
    }
    __ Bne(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    if (is_r6) {
      __ ScR6(tmp, tmp_ptr);
    } else {
      __ ScR2(tmp, tmp_ptr);
    }
    __ Beqz(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const Register temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
                                          Location out,
                                          Location ref,
                                          Location obj,
                                          uint32_t offset,
                                          Location index)
      : SlowPathCodeMIPS(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips::MipsAssembler::Sll and
          // art::mips::MipsAssembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               root_,
                               Primitive::kPrimNot);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};

CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
                                     const MipsInstructionSetFeatures& isa_features,
                                     const CompilerOptions& compiler_options,
                                     OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfFRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      clobbered_ra_(false) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<MipsAssembler*>(GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()

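// Finalizing the assembler fixes up (and may expand) branches, which can shift code positions;
// the native PC offsets recorded in stack maps and disassembly intervals are therefore remapped
// below via GetAdjustedPosition().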
void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}

MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverMIPS::EmitMove(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}

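// Swaps go through the scratch registers: TMP/FTMP for the simple cases, plus AT for the
// FPR <-> GPR-pair case below, which needs two core temporaries at once.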
1158void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
1159 DCHECK_LT(index, moves_.size());
1160 MoveOperands* move = moves_[index];
1161 Primitive::Type type = move->GetType();
1162 Location loc1 = move->GetDestination();
1163 Location loc2 = move->GetSource();
1164
1165 DCHECK(!loc1.IsConstant());
1166 DCHECK(!loc2.IsConstant());
1167
1168 if (loc1.Equals(loc2)) {
1169 return;
1170 }
1171
1172 if (loc1.IsRegister() && loc2.IsRegister()) {
1173 // Swap 2 GPRs.
1174 Register r1 = loc1.AsRegister<Register>();
1175 Register r2 = loc2.AsRegister<Register>();
1176 __ Move(TMP, r2);
1177 __ Move(r2, r1);
1178 __ Move(r1, TMP);
1179 } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
1180 FRegister f1 = loc1.AsFpuRegister<FRegister>();
1181 FRegister f2 = loc2.AsFpuRegister<FRegister>();
1182 if (type == Primitive::kPrimFloat) {
1183 __ MovS(FTMP, f2);
1184 __ MovS(f2, f1);
1185 __ MovS(f1, FTMP);
1186 } else {
1187 DCHECK_EQ(type, Primitive::kPrimDouble);
1188 __ MovD(FTMP, f2);
1189 __ MovD(f2, f1);
1190 __ MovD(f1, FTMP);
1191 }
1192 } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
1193 (loc1.IsFpuRegister() && loc2.IsRegister())) {
1194 // Swap FPR and GPR.
1195 DCHECK_EQ(type, Primitive::kPrimFloat); // Can only swap a float.
1196 FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
1197 : loc2.AsFpuRegister<FRegister>();
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001198 Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001199 __ Move(TMP, r2);
1200 __ Mfc1(r2, f1);
1201 __ Mtc1(TMP, f1);
1202 } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
1203 // Swap 2 GPR register pairs.
1204 Register r1 = loc1.AsRegisterPairLow<Register>();
1205 Register r2 = loc2.AsRegisterPairLow<Register>();
1206 __ Move(TMP, r2);
1207 __ Move(r2, r1);
1208 __ Move(r1, TMP);
1209 r1 = loc1.AsRegisterPairHigh<Register>();
1210 r2 = loc2.AsRegisterPairHigh<Register>();
1211 __ Move(TMP, r2);
1212 __ Move(r2, r1);
1213 __ Move(r1, TMP);
1214 } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
1215 (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
1216 // Swap FPR and GPR register pair.
1217 DCHECK_EQ(type, Primitive::kPrimDouble);
1218 FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
1219 : loc2.AsFpuRegister<FRegister>();
1220 Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
1221 : loc2.AsRegisterPairLow<Register>();
1222 Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
1223 : loc2.AsRegisterPairHigh<Register>();
1224 // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
1225 // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
1226    // unpredictable and the following mfhc1 will fail.
1227 __ Mfc1(TMP, f1);
Alexey Frunzebb9863a2016-01-11 15:51:16 -08001228 __ MoveFromFpuHigh(AT, f1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001229 __ Mtc1(r2_l, f1);
Alexey Frunzebb9863a2016-01-11 15:51:16 -08001230 __ MoveToFpuHigh(r2_h, f1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001231 __ Move(r2_l, TMP);
1232 __ Move(r2_h, AT);
1233 } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
1234 Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
1235 } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
1236 Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
David Brazdilcc0f3112016-01-28 17:14:52 +00001237 } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
1238 (loc1.IsStackSlot() && loc2.IsRegister())) {
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001239 Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
1240 intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
David Brazdilcc0f3112016-01-28 17:14:52 +00001241 __ Move(TMP, reg);
1242 __ LoadFromOffset(kLoadWord, reg, SP, offset);
1243 __ StoreToOffset(kStoreWord, TMP, SP, offset);
1244 } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
1245 (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
1246 Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
1247 : loc2.AsRegisterPairLow<Register>();
1248 Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
1249 : loc2.AsRegisterPairHigh<Register>();
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001250 intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
David Brazdilcc0f3112016-01-28 17:14:52 +00001251 intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
1252 : loc2.GetHighStackIndex(kMipsWordSize);
1253 __ Move(TMP, reg_l);
David Brazdilcc0f3112016-01-28 17:14:52 +00001254 __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
David Brazdilcc0f3112016-01-28 17:14:52 +00001255 __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
David Brazdil04d3e872016-01-29 09:50:09 +00001256 __ Move(TMP, reg_h);
1257 __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
1258 __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001259 } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
1260 FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
1261 : loc2.AsFpuRegister<FRegister>();
1262 intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
1263 if (type == Primitive::kPrimFloat) {
1264 __ MovS(FTMP, reg);
1265 __ LoadSFromOffset(reg, SP, offset);
1266 __ StoreSToOffset(FTMP, SP, offset);
1267 } else {
1268 DCHECK_EQ(type, Primitive::kPrimDouble);
1269 __ MovD(FTMP, reg);
1270 __ LoadDFromOffset(reg, SP, offset);
1271 __ StoreDToOffset(FTMP, SP, offset);
1272 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001273 } else {
1274 LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
1275 }
1276}
1277
1278void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
1279 __ Pop(static_cast<Register>(reg));
1280}
1281
1282void ParallelMoveResolverMIPS::SpillScratch(int reg) {
1283 __ Push(static_cast<Register>(reg));
1284}
1285
1286void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
1287 // Allocate a scratch register other than TMP, if available.
1288 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1289 // automatically unspilled when the scratch scope object is destroyed).
1290 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1291 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
1292 int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
1293 for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
1294 __ LoadFromOffset(kLoadWord,
1295 Register(ensure_scratch.GetRegister()),
1296 SP,
1297 index1 + stack_offset);
1298 __ LoadFromOffset(kLoadWord,
1299 TMP,
1300 SP,
1301 index2 + stack_offset);
1302 __ StoreToOffset(kStoreWord,
1303 Register(ensure_scratch.GetRegister()),
1304 SP,
1305 index2 + stack_offset);
1306 __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
1307 }
1308}
1309
Alexey Frunze73296a72016-06-03 22:51:46 -07001310void CodeGeneratorMIPS::ComputeSpillMask() {
1311 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
1312 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
1313 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
1314  // If there are FPU callee-saved registers and an odd number of GPR callee-saved
1315 // registers, include the ZERO register to force alignment of FPU callee-saved registers
1316 // within the stack frame.
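  // (For example, three saved GPRs occupy 12 bytes; padding with ZERO rounds this up to
  // 16 bytes so the first saved double lands on an 8-byte boundary.)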
1317 if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
1318 core_spill_mask_ |= (1 << ZERO);
1319 }
Alexey Frunze58320ce2016-08-30 21:40:46 -07001320}
1321
1322bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
Alexey Frunze06a46c42016-07-19 15:00:40 -07001323 // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
Alexey Frunze58320ce2016-08-30 21:40:46 -07001324 // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
1325 // into the path that creates a stack frame so that RA can be explicitly saved and restored.
1326 // RA can't otherwise be saved/restored when it's the only spilled register.
Alexey Frunze58320ce2016-08-30 21:40:46 -07001327 return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
Alexey Frunze73296a72016-06-03 22:51:46 -07001328}
1329
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001330static dwarf::Reg DWARFReg(Register reg) {
1331 return dwarf::Reg::MipsCore(static_cast<int>(reg));
1332}
1333
1334// TODO: mapping of floating-point registers to DWARF.
1335
1336void CodeGeneratorMIPS::GenerateFrameEntry() {
1337 __ Bind(&frame_entry_label_);
1338
1339 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();
1340
1341 if (do_overflow_check) {
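    // Probe the lowest address the new frame may touch. If this hits the stack guard page,
    // the fault is reported as a stack overflow; loading into ZERO simply discards the value.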
1342 __ LoadFromOffset(kLoadWord,
1343 ZERO,
1344 SP,
1345 -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
1346 RecordPcInfo(nullptr, 0);
1347 }
1348
1349 if (HasEmptyFrame()) {
Alexey Frunze58320ce2016-08-30 21:40:46 -07001350 CHECK_EQ(fpu_spill_mask_, 0u);
1351 CHECK_EQ(core_spill_mask_, 1u << RA);
1352 CHECK(!clobbered_ra_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001353 return;
1354 }
1355
1356 // Make sure the frame size isn't unreasonably large.
1357 if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
1358 LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
1359 }
1360
1361 // Spill callee-saved registers.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001362
Alexey Frunze73296a72016-06-03 22:51:46 -07001363 uint32_t ofs = GetFrameSize();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001364 __ IncreaseFrameSize(ofs);
1365
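  // Store the callee-saved core registers from the top of the frame downwards,
  // highest-numbered register first.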
Alexey Frunze73296a72016-06-03 22:51:46 -07001366 for (uint32_t mask = core_spill_mask_; mask != 0; ) {
1367 Register reg = static_cast<Register>(MostSignificantBit(mask));
1368 mask ^= 1u << reg;
1369 ofs -= kMipsWordSize;
1370 // The ZERO register is only included for alignment.
1371 if (reg != ZERO) {
1372 __ StoreToOffset(kStoreWord, reg, SP, ofs);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001373 __ cfi().RelOffset(DWARFReg(reg), ofs);
1374 }
1375 }
1376
Alexey Frunze73296a72016-06-03 22:51:46 -07001377 for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
1378 FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
1379 mask ^= 1u << reg;
1380 ofs -= kMipsDoublewordSize;
1381 __ StoreDToOffset(reg, SP, ofs);
1382 // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001383 }
1384
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001385 // Save the current method if we need it. Note that we do not
1386 // do this in HCurrentMethod, as the instruction might have been removed
1387 // in the SSA graph.
1388 if (RequiresCurrentMethod()) {
1389 __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
1390 }
Goran Jakovljevicc6418422016-12-05 16:31:55 +01001391
1392 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1393 // Initialize should deoptimize flag to 0.
1394 __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
1395 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001396}
1397
1398void CodeGeneratorMIPS::GenerateFrameExit() {
1399 __ cfi().RememberState();
1400
1401 if (!HasEmptyFrame()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001402 // Restore callee-saved registers.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001403
Alexey Frunze73296a72016-06-03 22:51:46 -07001404    // For better instruction scheduling, restore RA before other registers.
1405 uint32_t ofs = GetFrameSize();
1406 for (uint32_t mask = core_spill_mask_; mask != 0; ) {
1407 Register reg = static_cast<Register>(MostSignificantBit(mask));
1408 mask ^= 1u << reg;
1409 ofs -= kMipsWordSize;
1410 // The ZERO register is only included for alignment.
1411 if (reg != ZERO) {
1412 __ LoadFromOffset(kLoadWord, reg, SP, ofs);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001413 __ cfi().Restore(DWARFReg(reg));
1414 }
1415 }
1416
Alexey Frunze73296a72016-06-03 22:51:46 -07001417 for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
1418 FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
1419 mask ^= 1u << reg;
1420 ofs -= kMipsDoublewordSize;
1421 __ LoadDFromOffset(reg, SP, ofs);
1422 // TODO: __ cfi().Restore(DWARFReg(reg));
1423 }
1424
Alexey Frunze57eb0f52016-07-29 22:04:46 -07001425 size_t frame_size = GetFrameSize();
1426 // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
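    // A frame size that fits in a signed 16-bit immediate can be popped with a single
    // ADDIU, which is the only form that fits in the JR delay slot.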
1427 bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
1428 bool reordering = __ SetReorder(false);
1429 if (exchange) {
1430 __ Jr(RA);
1431 __ DecreaseFrameSize(frame_size); // Single instruction in delay slot.
1432 } else {
1433 __ DecreaseFrameSize(frame_size);
1434 __ Jr(RA);
1435 __ Nop(); // In delay slot.
1436 }
1437 __ SetReorder(reordering);
1438 } else {
1439 __ Jr(RA);
1440 __ NopIfNoReordering();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001441 }
1442
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001443 __ cfi().RestoreState();
1444 __ cfi().DefCFAOffset(GetFrameSize());
1445}
1446
1447void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1448 __ Bind(GetLabelOf(block));
1449}
1450
Lena Djokic8098da92017-06-28 12:07:50 +02001451void CodeGeneratorMIPS::MoveLocation(Location destination,
1452 Location source,
1453 Primitive::Type dst_type) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001454 if (source.Equals(destination)) {
1455 return;
1456 }
1457
Lena Djokic8098da92017-06-28 12:07:50 +02001458 if (source.IsConstant()) {
1459 MoveConstant(destination, source.GetConstant());
1460 } else {
1461 if (destination.IsRegister()) {
1462 if (source.IsRegister()) {
1463 __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
1464 } else if (source.IsFpuRegister()) {
1465 __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
1466 } else {
1467 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001468 __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
Lena Djokic8098da92017-06-28 12:07:50 +02001469 }
1470 } else if (destination.IsRegisterPair()) {
1471 if (source.IsRegisterPair()) {
1472 __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
1473 __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
1474 } else if (source.IsFpuRegister()) {
1475 Register dst_high = destination.AsRegisterPairHigh<Register>();
1476 Register dst_low = destination.AsRegisterPairLow<Register>();
1477 FRegister src = source.AsFpuRegister<FRegister>();
1478 __ Mfc1(dst_low, src);
1479 __ MoveFromFpuHigh(dst_high, src);
1480 } else {
1481 DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
1482 int32_t off = source.GetStackIndex();
1483 Register r = destination.AsRegisterPairLow<Register>();
1484 __ LoadFromOffset(kLoadDoubleword, r, SP, off);
1485 }
1486 } else if (destination.IsFpuRegister()) {
1487 if (source.IsRegister()) {
1488 DCHECK(!Primitive::Is64BitType(dst_type));
1489 __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
1490 } else if (source.IsRegisterPair()) {
1491 DCHECK(Primitive::Is64BitType(dst_type));
1492 FRegister dst = destination.AsFpuRegister<FRegister>();
1493 Register src_high = source.AsRegisterPairHigh<Register>();
1494 Register src_low = source.AsRegisterPairLow<Register>();
1495 __ Mtc1(src_low, dst);
1496 __ MoveToFpuHigh(src_high, dst);
1497 } else if (source.IsFpuRegister()) {
1498 if (Primitive::Is64BitType(dst_type)) {
1499 __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
1500 } else {
1501 DCHECK_EQ(dst_type, Primitive::kPrimFloat);
1502 __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
1503 }
1504 } else if (source.IsDoubleStackSlot()) {
1505 DCHECK(Primitive::Is64BitType(dst_type));
1506 __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
1507 } else {
1508 DCHECK(!Primitive::Is64BitType(dst_type));
1509 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
1510 __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
1511 }
1512 } else if (destination.IsDoubleStackSlot()) {
1513 int32_t dst_offset = destination.GetStackIndex();
1514 if (source.IsRegisterPair()) {
1515 __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, dst_offset);
1516 } else if (source.IsFpuRegister()) {
1517 __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
1518 } else {
1519 DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
1520 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1521 __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
1522 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
1523 __ StoreToOffset(kStoreWord, TMP, SP, dst_offset + 4);
1524 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001525 } else {
Lena Djokic8098da92017-06-28 12:07:50 +02001526 DCHECK(destination.IsStackSlot()) << destination;
1527 int32_t dst_offset = destination.GetStackIndex();
1528 if (source.IsRegister()) {
1529 __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, dst_offset);
1530 } else if (source.IsFpuRegister()) {
1531 __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
1532 } else {
1533 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
1534 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1535 __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
1536 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001537 }
1538 }
1539}
1540
1541void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
1542 if (c->IsIntConstant() || c->IsNullConstant()) {
1543 // Move 32 bit constant.
1544 int32_t value = GetInt32ValueOf(c);
1545 if (destination.IsRegister()) {
1546 Register dst = destination.AsRegister<Register>();
1547 __ LoadConst32(dst, value);
1548 } else {
1549 DCHECK(destination.IsStackSlot())
1550 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001551 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001552 }
1553 } else if (c->IsLongConstant()) {
1554 // Move 64 bit constant.
1555 int64_t value = GetInt64ValueOf(c);
1556 if (destination.IsRegisterPair()) {
1557 Register r_h = destination.AsRegisterPairHigh<Register>();
1558 Register r_l = destination.AsRegisterPairLow<Register>();
1559 __ LoadConst64(r_h, r_l, value);
1560 } else {
1561 DCHECK(destination.IsDoubleStackSlot())
1562 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001563 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001564 }
1565 } else if (c->IsFloatConstant()) {
1566 // Move 32 bit float constant.
1567 int32_t value = GetInt32ValueOf(c);
1568 if (destination.IsFpuRegister()) {
1569 __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
1570 } else {
1571 DCHECK(destination.IsStackSlot())
1572 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001573 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001574 }
1575 } else {
1576 // Move 64 bit double constant.
1577 DCHECK(c->IsDoubleConstant()) << c->DebugName();
1578 int64_t value = GetInt64ValueOf(c);
1579 if (destination.IsFpuRegister()) {
1580 FRegister fd = destination.AsFpuRegister<FRegister>();
1581 __ LoadDConst64(fd, value, TMP);
1582 } else {
1583 DCHECK(destination.IsDoubleStackSlot())
1584 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001585 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001586 }
1587 }
1588}
1589
1590void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1591 DCHECK(destination.IsRegister());
1592 Register dst = destination.AsRegister<Register>();
1593 __ LoadConst32(dst, value);
1594}
1595
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001596void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
1597 if (location.IsRegister()) {
1598 locations->AddTemp(location);
Alexey Frunzec9e94f32015-10-26 16:11:39 -07001599 } else if (location.IsRegisterPair()) {
1600 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1601 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001602 } else {
1603 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1604 }
1605}
1606
Vladimir Markoaad75c62016-10-03 08:46:48 +00001607template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1608inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
1609 const ArenaDeque<PcRelativePatchInfo>& infos,
1610 ArenaVector<LinkerPatch>* linker_patches) {
1611 for (const PcRelativePatchInfo& info : infos) {
1612 const DexFile& dex_file = info.target_dex_file;
1613 size_t offset_or_index = info.offset_or_index;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001614 DCHECK(info.label.IsBound());
1615 uint32_t literal_offset = __ GetLabelLocation(&info.label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001616 // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
1617 // the assembler's base label used for PC-relative addressing.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001618 const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
1619 uint32_t pc_rel_offset = info_high.pc_rel_label.IsBound()
1620 ? __ GetLabelLocation(&info_high.pc_rel_label)
Vladimir Markoaad75c62016-10-03 08:46:48 +00001621 : __ GetPcRelBaseLabelLocation();
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001622 linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00001623 }
1624}
1625
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001626void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
1627 DCHECK(linker_patches->empty());
1628 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01001629 pc_relative_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001630 method_bss_entry_patches_.size() +
Alexey Frunze06a46c42016-07-19 15:00:40 -07001631 pc_relative_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01001632 type_bss_entry_patches_.size() +
1633 pc_relative_string_patches_.size();
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001634 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01001635 if (GetCompilerOptions().IsBootImage()) {
1636 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
Vladimir Markoaad75c62016-10-03 08:46:48 +00001637 linker_patches);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001638 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
1639 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001640 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
1641 linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01001642 } else {
1643 DCHECK(pc_relative_method_patches_.empty());
1644 DCHECK(pc_relative_type_patches_.empty());
1645 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
1646 linker_patches);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001647 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001648 EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
1649 linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001650 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
1651 linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001652 DCHECK_EQ(size, linker_patches->size());
Alexey Frunze06a46c42016-07-19 15:00:40 -07001653}
1654
Vladimir Marko65979462017-05-19 17:25:12 +01001655CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001656 MethodReference target_method,
1657 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001658 return NewPcRelativePatch(*target_method.dex_file,
1659 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001660 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001661 &pc_relative_method_patches_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001662}
1663
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001664CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001665 MethodReference target_method,
1666 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001667 return NewPcRelativePatch(*target_method.dex_file,
1668 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001669 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001670 &method_bss_entry_patches_);
1671}
1672
Alexey Frunze06a46c42016-07-19 15:00:40 -07001673CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001674 const DexFile& dex_file,
1675 dex::TypeIndex type_index,
1676 const PcRelativePatchInfo* info_high) {
1677 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001678}
1679
Vladimir Marko1998cd02017-01-13 13:02:58 +00001680CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001681 const DexFile& dex_file,
1682 dex::TypeIndex type_index,
1683 const PcRelativePatchInfo* info_high) {
1684 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001685}
1686
Vladimir Marko65979462017-05-19 17:25:12 +01001687CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001688 const DexFile& dex_file,
1689 dex::StringIndex string_index,
1690 const PcRelativePatchInfo* info_high) {
1691 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001692}
1693
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001694CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001695 const DexFile& dex_file,
1696 uint32_t offset_or_index,
1697 const PcRelativePatchInfo* info_high,
1698 ArenaDeque<PcRelativePatchInfo>* patches) {
1699 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001700 return &patches->back();
1701}
1702
Alexey Frunze06a46c42016-07-19 15:00:40 -07001703Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1704 return map->GetOrCreate(
1705 value,
1706 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1707}
1708
Alexey Frunze06a46c42016-07-19 15:00:40 -07001709Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001710 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001711}
1712
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001713void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001714 Register out,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001715 Register base,
1716 PcRelativePatchInfo* info_low) {
1717 DCHECK(!info_high->patch_info_high);
Alexey Frunze6079dca2017-05-28 19:10:28 -07001718 DCHECK_NE(out, base);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001719 if (GetInstructionSetFeatures().IsR6()) {
1720 DCHECK_EQ(base, ZERO);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001721 __ Bind(&info_high->label);
1722 __ Bind(&info_high->pc_rel_label);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001723 // Add the high half of a 32-bit offset to PC.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001724 __ Auipc(out, /* placeholder */ 0x1234);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001725 } else {
1726 // If base is ZERO, emit NAL to obtain the actual base.
1727 if (base == ZERO) {
1728 // Generate a dummy PC-relative call to obtain PC.
1729 __ Nal();
1730 }
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001731 __ Bind(&info_high->label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001732 __ Lui(out, /* placeholder */ 0x1234);
1733 // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
1734 // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
1735 if (base == ZERO) {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001736 __ Bind(&info_high->pc_rel_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001737 }
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001738 // Add the high half of a 32-bit offset to PC.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001739 __ Addu(out, out, (base == ZERO) ? RA : base);
1740 }
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001741 // A following instruction will add the sign-extended low half of the 32-bit
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001742 // offset to `out` (e.g. lw, jialc, addiu).
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001743 DCHECK_EQ(info_low->patch_info_high, info_high);
1744 __ Bind(&info_low->label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001745}
1746
Alexey Frunze627c1a02017-01-30 19:28:14 -08001747CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
1748 const DexFile& dex_file,
1749 dex::StringIndex dex_index,
1750 Handle<mirror::String> handle) {
1751 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
1752 reinterpret_cast64<uint64_t>(handle.GetReference()));
1753 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
1754 return &jit_string_patches_.back();
1755}
1756
1757CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
1758 const DexFile& dex_file,
1759 dex::TypeIndex dex_index,
1760 Handle<mirror::Class> handle) {
1761 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
1762 reinterpret_cast64<uint64_t>(handle.GetReference()));
1763 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
1764 return &jit_class_patches_.back();
1765}
1766
1767void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
1768 const uint8_t* roots_data,
1769 const CodeGeneratorMIPS::JitPatchInfo& info,
1770 uint64_t index_in_table) const {
1771 uint32_t literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
1772 uintptr_t address =
1773 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1774 uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
1775 // lui reg, addr32_high
1776 DCHECK_EQ(code[literal_offset + 0], 0x34);
1777 DCHECK_EQ(code[literal_offset + 1], 0x12);
1778 DCHECK_EQ((code[literal_offset + 2] & 0xE0), 0x00);
1779 DCHECK_EQ(code[literal_offset + 3], 0x3C);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001780 // instr reg, reg, addr32_low
Alexey Frunze627c1a02017-01-30 19:28:14 -08001781 DCHECK_EQ(code[literal_offset + 4], 0x78);
1782 DCHECK_EQ(code[literal_offset + 5], 0x56);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001783 addr32 += (addr32 & 0x8000) << 1; // Account for sign extension in "instr reg, reg, addr32_low".
Alexey Frunze627c1a02017-01-30 19:28:14 -08001784 // lui reg, addr32_high
1785 code[literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
1786 code[literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001787 // instr reg, reg, addr32_low
Alexey Frunze627c1a02017-01-30 19:28:14 -08001788 code[literal_offset + 4] = static_cast<uint8_t>(addr32 >> 0);
1789 code[literal_offset + 5] = static_cast<uint8_t>(addr32 >> 8);
1790}
1791
1792void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1793 for (const JitPatchInfo& info : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001794 const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
1795 dex::StringIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001796 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001797 uint64_t index_in_table = it->second;
1798 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001799 }
1800 for (const JitPatchInfo& info : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001801 const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
1802 dex::TypeIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001803 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001804 uint64_t index_in_table = it->second;
1805 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001806 }
1807}
1808
Goran Jakovljevice114da22016-12-26 14:21:43 +01001809void CodeGeneratorMIPS::MarkGCCard(Register object,
1810 Register value,
1811 bool value_can_be_null) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001812 MipsLabel done;
1813 Register card = AT;
1814 Register temp = TMP;
Goran Jakovljevice114da22016-12-26 14:21:43 +01001815 if (value_can_be_null) {
1816 __ Beqz(value, &done);
1817 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001818 __ LoadFromOffset(kLoadWord,
1819 card,
1820 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -07001821 Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
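  // `card` now holds the biased card table base. Storing its least significant byte at
  // (card + (object >> kCardShift)) marks the object's card dirty; the base is biased so
  // that its low byte equals the dirty card value.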
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001822 __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
1823 __ Addu(temp, card, temp);
1824 __ Sb(card, temp, 0);
Goran Jakovljevice114da22016-12-26 14:21:43 +01001825 if (value_can_be_null) {
1826 __ Bind(&done);
1827 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001828}
1829
David Brazdil58282f42016-01-14 12:45:10 +00001830void CodeGeneratorMIPS::SetupBlockedRegisters() const {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001831 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1832 blocked_core_registers_[ZERO] = true;
1833 blocked_core_registers_[K0] = true;
1834 blocked_core_registers_[K1] = true;
1835 blocked_core_registers_[GP] = true;
1836 blocked_core_registers_[SP] = true;
1837 blocked_core_registers_[RA] = true;
1838
1839 // AT and TMP(T8) are used as temporary/scratch registers
1840 // (similar to how AT is used by MIPS assemblers).
1841 blocked_core_registers_[AT] = true;
1842 blocked_core_registers_[TMP] = true;
1843 blocked_fpu_registers_[FTMP] = true;
1844
1845 // Reserve suspend and thread registers.
1846 blocked_core_registers_[S0] = true;
1847 blocked_core_registers_[TR] = true;
1848
1849  // Reserve T9 for function calls.
1850 blocked_core_registers_[T9] = true;
1851
1852 // Reserve odd-numbered FPU registers.
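  // (Doubles occupy an even/odd register pair, so only even-numbered FPU registers are
  // exposed to the register allocator.)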
1853 for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
1854 blocked_fpu_registers_[i] = true;
1855 }
1856
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02001857 if (GetGraph()->IsDebuggable()) {
1858 // Stubs do not save callee-save floating point registers. If the graph
1859 // is debuggable, we need to deal with these registers differently. For
1860 // now, just block them.
1861 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1862 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1863 }
1864 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001865}
1866
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001867size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1868 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1869 return kMipsWordSize;
1870}
1871
1872size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1873 __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
1874 return kMipsWordSize;
1875}
1876
1877size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1878 __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
1879 return kMipsDoublewordSize;
1880}
1881
1882size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1883 __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
1884 return kMipsDoublewordSize;
1885}
1886
1887void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001888 stream << Register(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001889}
1890
1891void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001892 stream << FRegister(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001893}
1894
Serban Constantinescufca16662016-07-14 09:21:59 +01001895constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1896
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001897void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
1898 HInstruction* instruction,
1899 uint32_t dex_pc,
1900 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001901 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Alexey Frunze15958152017-02-09 19:08:30 -08001902 GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
1903 IsDirectEntrypoint(entrypoint));
1904 if (EntrypointRequiresStackMap(entrypoint)) {
1905 RecordPcInfo(instruction, dex_pc, slow_path);
1906 }
1907}
1908
1909void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1910 HInstruction* instruction,
1911 SlowPathCode* slow_path,
1912 bool direct) {
1913 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
1914 GenerateInvokeRuntime(entry_point_offset, direct);
1915}
1916
1917void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
Alexey Frunze57eb0f52016-07-29 22:04:46 -07001918 bool reordering = __ SetReorder(false);
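  // Load the entrypoint from the Thread register and call through T9, the register that
  // PIC code expects to hold the address of the called function.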
Alexey Frunze15958152017-02-09 19:08:30 -08001919 __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08001920 __ Jalr(T9);
Alexey Frunze15958152017-02-09 19:08:30 -08001921 if (direct) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001922 // Reserve argument space on stack (for $a0-$a3) for
1923 // entrypoints that directly reference native implementations.
1924    // The called function may use this space to store the $a0-$a3 registers.
Alexey Frunze5c7aed32015-11-25 19:41:54 -08001925 __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset); // Single instruction in delay slot.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001926 __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08001927 } else {
1928 __ Nop(); // In delay slot.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001929 }
Alexey Frunze57eb0f52016-07-29 22:04:46 -07001930 __ SetReorder(reordering);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001931}
1932
1933void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
1934 Register class_reg) {
1935 __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
1936 __ LoadConst32(AT, mirror::Class::kStatusInitialized);
1937 __ Blt(TMP, AT, slow_path->GetEntryLabel());
1938 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
1939 __ Sync(0);
1940 __ Bind(slow_path->GetExitLabel());
1941}
1942
1943void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
1944 __ Sync(0); // Only stype 0 is supported.
1945}
1946
1947void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
1948 HBasicBlock* successor) {
1949 SuspendCheckSlowPathMIPS* slow_path =
1950 new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
1951 codegen_->AddSlowPath(slow_path);
1952
1953 __ LoadFromOffset(kLoadUnsignedHalfword,
1954 TMP,
1955 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -07001956 Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001957 if (successor == nullptr) {
1958 __ Bnez(TMP, slow_path->GetEntryLabel());
1959 __ Bind(slow_path->GetReturnLabel());
1960 } else {
1961 __ Beqz(TMP, codegen_->GetLabelOf(successor));
1962 __ B(slow_path->GetEntryLabel());
1963 // slow_path will return to GetLabelOf(successor).
1964 }
1965}
1966
1967InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
1968 CodeGeneratorMIPS* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001969 : InstructionCodeGenerator(graph, codegen),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001970 assembler_(codegen->GetAssembler()),
1971 codegen_(codegen) {}
1972
1973void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
1974 DCHECK_EQ(instruction->InputCount(), 2U);
1975 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1976 Primitive::Type type = instruction->GetResultType();
1977 switch (type) {
1978 case Primitive::kPrimInt: {
1979 locations->SetInAt(0, Location::RequiresRegister());
1980 HInstruction* right = instruction->InputAt(1);
1981 bool can_use_imm = false;
1982 if (right->IsConstant()) {
1983 int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
1984 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1985 can_use_imm = IsUint<16>(imm);
1986 } else if (instruction->IsAdd()) {
1987 can_use_imm = IsInt<16>(imm);
1988 } else {
1989 DCHECK(instruction->IsSub());
1990 can_use_imm = IsInt<16>(-imm);
1991 }
1992 }
1993 if (can_use_imm)
1994 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1995 else
1996 locations->SetInAt(1, Location::RequiresRegister());
1997 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1998 break;
1999 }
2000
2001 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002002 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002003 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2004 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002005 break;
2006 }
2007
2008 case Primitive::kPrimFloat:
2009 case Primitive::kPrimDouble:
2010 DCHECK(instruction->IsAdd() || instruction->IsSub());
2011 locations->SetInAt(0, Location::RequiresFpuRegister());
2012 locations->SetInAt(1, Location::RequiresFpuRegister());
2013 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2014 break;
2015
2016 default:
2017 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
2018 }
2019}
2020
2021void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
2022 Primitive::Type type = instruction->GetType();
2023 LocationSummary* locations = instruction->GetLocations();
2024
2025 switch (type) {
2026 case Primitive::kPrimInt: {
2027 Register dst = locations->Out().AsRegister<Register>();
2028 Register lhs = locations->InAt(0).AsRegister<Register>();
2029 Location rhs_location = locations->InAt(1);
2030
2031 Register rhs_reg = ZERO;
2032 int32_t rhs_imm = 0;
2033 bool use_imm = rhs_location.IsConstant();
2034 if (use_imm) {
2035 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
2036 } else {
2037 rhs_reg = rhs_location.AsRegister<Register>();
2038 }
2039
2040 if (instruction->IsAnd()) {
2041 if (use_imm)
2042 __ Andi(dst, lhs, rhs_imm);
2043 else
2044 __ And(dst, lhs, rhs_reg);
2045 } else if (instruction->IsOr()) {
2046 if (use_imm)
2047 __ Ori(dst, lhs, rhs_imm);
2048 else
2049 __ Or(dst, lhs, rhs_reg);
2050 } else if (instruction->IsXor()) {
2051 if (use_imm)
2052 __ Xori(dst, lhs, rhs_imm);
2053 else
2054 __ Xor(dst, lhs, rhs_reg);
2055 } else if (instruction->IsAdd()) {
2056 if (use_imm)
2057 __ Addiu(dst, lhs, rhs_imm);
2058 else
2059 __ Addu(dst, lhs, rhs_reg);
2060 } else {
2061 DCHECK(instruction->IsSub());
2062 if (use_imm)
2063 __ Addiu(dst, lhs, -rhs_imm);
2064 else
2065 __ Subu(dst, lhs, rhs_reg);
2066 }
2067 break;
2068 }
2069
2070 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002071 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
2072 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
2073 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2074 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002075 Location rhs_location = locations->InAt(1);
2076 bool use_imm = rhs_location.IsConstant();
2077 if (!use_imm) {
2078 Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
2079 Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
2080 if (instruction->IsAnd()) {
2081 __ And(dst_low, lhs_low, rhs_low);
2082 __ And(dst_high, lhs_high, rhs_high);
2083 } else if (instruction->IsOr()) {
2084 __ Or(dst_low, lhs_low, rhs_low);
2085 __ Or(dst_high, lhs_high, rhs_high);
2086 } else if (instruction->IsXor()) {
2087 __ Xor(dst_low, lhs_low, rhs_low);
2088 __ Xor(dst_high, lhs_high, rhs_high);
2089 } else if (instruction->IsAdd()) {
2090 if (lhs_low == rhs_low) {
2091 // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
2092 __ Slt(TMP, lhs_low, ZERO);
2093 __ Addu(dst_low, lhs_low, rhs_low);
2094 } else {
2095 __ Addu(dst_low, lhs_low, rhs_low);
2096 // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
2097 __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
2098 }
2099 __ Addu(dst_high, lhs_high, rhs_high);
2100 __ Addu(dst_high, dst_high, TMP);
2101 } else {
2102 DCHECK(instruction->IsSub());
2103 __ Sltu(TMP, lhs_low, rhs_low);
2104 __ Subu(dst_low, lhs_low, rhs_low);
2105 __ Subu(dst_high, lhs_high, rhs_high);
2106 __ Subu(dst_high, dst_high, TMP);
2107 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002108 } else {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002109 int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
2110 if (instruction->IsOr()) {
2111 uint32_t low = Low32Bits(value);
2112 uint32_t high = High32Bits(value);
2113 if (IsUint<16>(low)) {
2114 if (dst_low != lhs_low || low != 0) {
2115 __ Ori(dst_low, lhs_low, low);
2116 }
2117 } else {
2118 __ LoadConst32(TMP, low);
2119 __ Or(dst_low, lhs_low, TMP);
2120 }
2121 if (IsUint<16>(high)) {
2122 if (dst_high != lhs_high || high != 0) {
2123 __ Ori(dst_high, lhs_high, high);
2124 }
2125 } else {
2126 if (high != low) {
2127 __ LoadConst32(TMP, high);
2128 }
2129 __ Or(dst_high, lhs_high, TMP);
2130 }
2131 } else if (instruction->IsXor()) {
2132 uint32_t low = Low32Bits(value);
2133 uint32_t high = High32Bits(value);
2134 if (IsUint<16>(low)) {
2135 if (dst_low != lhs_low || low != 0) {
2136 __ Xori(dst_low, lhs_low, low);
2137 }
2138 } else {
2139 __ LoadConst32(TMP, low);
2140 __ Xor(dst_low, lhs_low, TMP);
2141 }
2142 if (IsUint<16>(high)) {
2143 if (dst_high != lhs_high || high != 0) {
2144 __ Xori(dst_high, lhs_high, high);
2145 }
2146 } else {
2147 if (high != low) {
2148 __ LoadConst32(TMP, high);
2149 }
2150 __ Xor(dst_high, lhs_high, TMP);
2151 }
2152 } else if (instruction->IsAnd()) {
2153 uint32_t low = Low32Bits(value);
2154 uint32_t high = High32Bits(value);
2155 if (IsUint<16>(low)) {
2156 __ Andi(dst_low, lhs_low, low);
2157 } else if (low != 0xFFFFFFFF) {
2158 __ LoadConst32(TMP, low);
2159 __ And(dst_low, lhs_low, TMP);
2160 } else if (dst_low != lhs_low) {
2161 __ Move(dst_low, lhs_low);
2162 }
2163 if (IsUint<16>(high)) {
2164 __ Andi(dst_high, lhs_high, high);
2165 } else if (high != 0xFFFFFFFF) {
2166 if (high != low) {
2167 __ LoadConst32(TMP, high);
2168 }
2169 __ And(dst_high, lhs_high, TMP);
2170 } else if (dst_high != lhs_high) {
2171 __ Move(dst_high, lhs_high);
2172 }
2173 } else {
2174 if (instruction->IsSub()) {
2175 value = -value;
2176 } else {
2177 DCHECK(instruction->IsAdd());
2178 }
2179 int32_t low = Low32Bits(value);
2180 int32_t high = High32Bits(value);
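          // The constant was negated above for subtraction, so both cases reduce to an
          // addition: add the low halves, capture the carry in AT, then add it into the
          // high half.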
2181 if (IsInt<16>(low)) {
2182 if (dst_low != lhs_low || low != 0) {
2183 __ Addiu(dst_low, lhs_low, low);
2184 }
2185 if (low != 0) {
2186 __ Sltiu(AT, dst_low, low);
2187 }
2188 } else {
2189 __ LoadConst32(TMP, low);
2190 __ Addu(dst_low, lhs_low, TMP);
2191 __ Sltu(AT, dst_low, TMP);
2192 }
2193 if (IsInt<16>(high)) {
2194 if (dst_high != lhs_high || high != 0) {
2195 __ Addiu(dst_high, lhs_high, high);
2196 }
2197 } else {
2198 if (high != low) {
2199 __ LoadConst32(TMP, high);
2200 }
2201 __ Addu(dst_high, lhs_high, TMP);
2202 }
2203 if (low != 0) {
2204 __ Addu(dst_high, dst_high, AT);
2205 }
2206 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002207 }
2208 break;
2209 }
2210
2211 case Primitive::kPrimFloat:
2212 case Primitive::kPrimDouble: {
2213 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
2214 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
2215 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
2216 if (instruction->IsAdd()) {
2217 if (type == Primitive::kPrimFloat) {
2218 __ AddS(dst, lhs, rhs);
2219 } else {
2220 __ AddD(dst, lhs, rhs);
2221 }
2222 } else {
2223 DCHECK(instruction->IsSub());
2224 if (type == Primitive::kPrimFloat) {
2225 __ SubS(dst, lhs, rhs);
2226 } else {
2227 __ SubD(dst, lhs, rhs);
2228 }
2229 }
2230 break;
2231 }
2232
2233 default:
2234 LOG(FATAL) << "Unexpected binary operation type " << type;
2235 }
2236}
2237
2238void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002239 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002240
2241 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2242 Primitive::Type type = instr->GetResultType();
2243 switch (type) {
2244 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002245 locations->SetInAt(0, Location::RequiresRegister());
2246 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2247 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2248 break;
2249 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002250 locations->SetInAt(0, Location::RequiresRegister());
2251 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2252 locations->SetOut(Location::RequiresRegister());
2253 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002254 default:
2255 LOG(FATAL) << "Unexpected shift type " << type;
2256 }
2257}
2258
2259static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2260
2261void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002262 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002263 LocationSummary* locations = instr->GetLocations();
2264 Primitive::Type type = instr->GetType();
2265
2266 Location rhs_location = locations->InAt(1);
2267 bool use_imm = rhs_location.IsConstant();
2268 Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
2269 int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
Roland Levillain5b5b9312016-03-22 14:57:31 +00002270 const uint32_t shift_mask =
2271 (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002272 const uint32_t shift_value = rhs_imm & shift_mask;
Alexey Frunze92d90602015-12-18 18:16:36 -08002273 // Are the INS (Insert Bit Field) and ROTR instructions supported?
2274 bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002275
2276 switch (type) {
2277 case Primitive::kPrimInt: {
2278 Register dst = locations->Out().AsRegister<Register>();
2279 Register lhs = locations->InAt(0).AsRegister<Register>();
2280 if (use_imm) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002281 if (shift_value == 0) {
2282 if (dst != lhs) {
2283 __ Move(dst, lhs);
2284 }
2285 } else if (instr->IsShl()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002286 __ Sll(dst, lhs, shift_value);
2287 } else if (instr->IsShr()) {
2288 __ Sra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002289 } else if (instr->IsUShr()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002290 __ Srl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002291 } else {
2292 if (has_ins_rotr) {
2293 __ Rotr(dst, lhs, shift_value);
2294 } else {
2295 __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
2296 __ Srl(dst, lhs, shift_value);
2297 __ Or(dst, dst, TMP);
2298 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002299 }
2300 } else {
2301 if (instr->IsShl()) {
2302 __ Sllv(dst, lhs, rhs_reg);
2303 } else if (instr->IsShr()) {
2304 __ Srav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002305 } else if (instr->IsUShr()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002306 __ Srlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002307 } else {
2308 if (has_ins_rotr) {
2309 __ Rotrv(dst, lhs, rhs_reg);
2310 } else {
2311 __ Subu(TMP, ZERO, rhs_reg);
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002312 // 32-bit shift instructions use the 5 least significant bits of the shift count, so
2313 // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
2314 // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
2315 // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
2316            // in other words, the OR'd values are equal.
Alexey Frunze92d90602015-12-18 18:16:36 -08002317 __ Sllv(TMP, lhs, TMP);
2318 __ Srlv(dst, lhs, rhs_reg);
2319 __ Or(dst, dst, TMP);
2320 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002321 }
2322 }
2323 break;
2324 }
2325
2326 case Primitive::kPrimLong: {
2327 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
2328 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
2329 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2330 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
2331 if (use_imm) {
2332 if (shift_value == 0) {
Lena Djokic8098da92017-06-28 12:07:50 +02002333 codegen_->MoveLocation(locations->Out(), locations->InAt(0), type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002334 } else if (shift_value < kMipsBitsPerWord) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002335 if (has_ins_rotr) {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002336 if (instr->IsShl()) {
2337 __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
2338 __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
2339 __ Sll(dst_low, lhs_low, shift_value);
2340 } else if (instr->IsShr()) {
2341 __ Srl(dst_low, lhs_low, shift_value);
2342 __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
2343 __ Sra(dst_high, lhs_high, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002344 } else if (instr->IsUShr()) {
2345 __ Srl(dst_low, lhs_low, shift_value);
2346 __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
2347 __ Srl(dst_high, lhs_high, shift_value);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002348 } else {
2349 __ Srl(dst_low, lhs_low, shift_value);
2350 __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
2351 __ Srl(dst_high, lhs_high, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002352 __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002353 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002354 } else {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002355 if (instr->IsShl()) {
2356 __ Sll(dst_low, lhs_low, shift_value);
2357 __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
2358 __ Sll(dst_high, lhs_high, shift_value);
2359 __ Or(dst_high, dst_high, TMP);
2360 } else if (instr->IsShr()) {
2361 __ Sra(dst_high, lhs_high, shift_value);
2362 __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
2363 __ Srl(dst_low, lhs_low, shift_value);
2364 __ Or(dst_low, dst_low, TMP);
Alexey Frunze92d90602015-12-18 18:16:36 -08002365 } else if (instr->IsUShr()) {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002366 __ Srl(dst_high, lhs_high, shift_value);
2367 __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
2368 __ Srl(dst_low, lhs_low, shift_value);
2369 __ Or(dst_low, dst_low, TMP);
Alexey Frunze92d90602015-12-18 18:16:36 -08002370 } else {
2371 __ Srl(TMP, lhs_low, shift_value);
2372 __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
2373 __ Or(dst_low, dst_low, TMP);
2374 __ Srl(TMP, lhs_high, shift_value);
2375 __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
2376 __ Or(dst_high, dst_high, TMP);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002377 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002378 }
2379 } else {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002380 const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002381 if (instr->IsShl()) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002382 __ Sll(dst_high, lhs_low, shift_value_high);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002383 __ Move(dst_low, ZERO);
2384 } else if (instr->IsShr()) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002385 __ Sra(dst_low, lhs_high, shift_value_high);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002386 __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
Alexey Frunze92d90602015-12-18 18:16:36 -08002387 } else if (instr->IsUShr()) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002388 __ Srl(dst_low, lhs_high, shift_value_high);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002389 __ Move(dst_high, ZERO);
Alexey Frunze92d90602015-12-18 18:16:36 -08002390 } else {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002391 if (shift_value == kMipsBitsPerWord) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002392 // 64-bit rotation by 32 is just a swap.
2393 __ Move(dst_low, lhs_high);
2394 __ Move(dst_high, lhs_low);
2395 } else {
2396 if (has_ins_rotr) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002397 __ Srl(dst_low, lhs_high, shift_value_high);
2398 __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
2399 __ Srl(dst_high, lhs_low, shift_value_high);
2400 __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
Alexey Frunze92d90602015-12-18 18:16:36 -08002401 } else {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002402 __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
2403 __ Srl(dst_low, lhs_high, shift_value_high);
Alexey Frunze92d90602015-12-18 18:16:36 -08002404 __ Or(dst_low, dst_low, TMP);
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002405 __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
2406 __ Srl(dst_high, lhs_low, shift_value_high);
Alexey Frunze92d90602015-12-18 18:16:36 -08002407 __ Or(dst_high, dst_high, TMP);
2408 }
2409 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002410 }
2411 }
2412 } else {
2413 MipsLabel done;
2414 if (instr->IsShl()) {
2415 __ Sllv(dst_low, lhs_low, rhs_reg);
2416 __ Nor(AT, ZERO, rhs_reg);
2417 __ Srl(TMP, lhs_low, 1);
2418 __ Srlv(TMP, TMP, AT);
2419 __ Sllv(dst_high, lhs_high, rhs_reg);
2420 __ Or(dst_high, dst_high, TMP);
2421 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2422 __ Beqz(TMP, &done);
2423 __ Move(dst_high, dst_low);
2424 __ Move(dst_low, ZERO);
2425 } else if (instr->IsShr()) {
2426 __ Srav(dst_high, lhs_high, rhs_reg);
2427 __ Nor(AT, ZERO, rhs_reg);
2428 __ Sll(TMP, lhs_high, 1);
2429 __ Sllv(TMP, TMP, AT);
2430 __ Srlv(dst_low, lhs_low, rhs_reg);
2431 __ Or(dst_low, dst_low, TMP);
2432 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2433 __ Beqz(TMP, &done);
2434 __ Move(dst_low, dst_high);
2435 __ Sra(dst_high, dst_high, 31);
Alexey Frunze92d90602015-12-18 18:16:36 -08002436 } else if (instr->IsUShr()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002437 __ Srlv(dst_high, lhs_high, rhs_reg);
2438 __ Nor(AT, ZERO, rhs_reg);
2439 __ Sll(TMP, lhs_high, 1);
2440 __ Sllv(TMP, TMP, AT);
2441 __ Srlv(dst_low, lhs_low, rhs_reg);
2442 __ Or(dst_low, dst_low, TMP);
2443 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2444 __ Beqz(TMP, &done);
2445 __ Move(dst_low, dst_high);
2446 __ Move(dst_high, ZERO);
Alexey Frunze92d90602015-12-18 18:16:36 -08002447 } else {
2448 __ Nor(AT, ZERO, rhs_reg);
2449 __ Srlv(TMP, lhs_low, rhs_reg);
2450 __ Sll(dst_low, lhs_high, 1);
2451 __ Sllv(dst_low, dst_low, AT);
2452 __ Or(dst_low, dst_low, TMP);
2453 __ Srlv(TMP, lhs_high, rhs_reg);
2454 __ Sll(dst_high, lhs_low, 1);
2455 __ Sllv(dst_high, dst_high, AT);
2456 __ Or(dst_high, dst_high, TMP);
2457 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2458 __ Beqz(TMP, &done);
2459 __ Move(TMP, dst_high);
2460 __ Move(dst_high, dst_low);
2461 __ Move(dst_low, TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002462 }
2463 __ Bind(&done);
2464 }
2465 break;
2466 }
2467
2468 default:
2469 LOG(FATAL) << "Unexpected shift operation type " << type;
2470 }
2471}
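
// Illustrative sketch (plain C++, not part of the original change): the variable-amount 64-bit
// shift emitted above uses only the low 5 bits of the amount per MIPS shift instruction, so the
// low word's spill-over into the high word is computed as (lo >> 1) >> (31 - (shift & 31)), which
// stays well defined even when (shift & 31) == 0, and bit 5 of the amount selects the word swap
// done by the final Andi/Beqz/Move sequence (rotates use the same bit-5 swap idea).
static void ShlLongSketch(uint32_t lo, uint32_t hi, uint32_t shift,
                          uint32_t* out_lo, uint32_t* out_hi) {
  uint32_t s = shift & 31u;
  uint32_t new_lo = lo << s;
  uint32_t new_hi = (hi << s) | ((lo >> 1) >> (31u - s));  // carry from the low word
  if ((shift & 32u) != 0u) {  // shifting by 32..63: the low word becomes the high word
    new_hi = new_lo;
    new_lo = 0u;
  }
  *out_lo = new_lo;
  *out_hi = new_hi;
}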
2472
2473void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
2474 HandleBinaryOp(instruction);
2475}
2476
2477void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
2478 HandleBinaryOp(instruction);
2479}
2480
2481void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
2482 HandleBinaryOp(instruction);
2483}
2484
2485void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
2486 HandleBinaryOp(instruction);
2487}
2488
2489void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002490 Primitive::Type type = instruction->GetType();
2491 bool object_array_get_with_read_barrier =
2492 kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002493 LocationSummary* locations =
Alexey Frunze15958152017-02-09 19:08:30 -08002494 new (GetGraph()->GetArena()) LocationSummary(instruction,
2495 object_array_get_with_read_barrier
2496 ? LocationSummary::kCallOnSlowPath
2497 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07002498 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2499 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
2500 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002501 locations->SetInAt(0, Location::RequiresRegister());
2502 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexey Frunze15958152017-02-09 19:08:30 -08002503 if (Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002504 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2505 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002506 // The output overlaps in the case of an object array get with
2507 // read barriers enabled: we do not want the move to overwrite the
2508 // array's location, as we need it to emit the read barrier.
2509 locations->SetOut(Location::RequiresRegister(),
2510 object_array_get_with_read_barrier
2511 ? Location::kOutputOverlap
2512 : Location::kNoOutputOverlap);
2513 }
2514 // We need a temporary register for the read barrier marking slow
2515 // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
2516 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2517 locations->AddTemp(Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002518 }
2519}
2520
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002521static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2522 auto null_checker = [codegen, instruction]() {
2523 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002524 };
2525 return null_checker;
2526}
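
// Usage sketch for the helper above (illustrative): the returned lambda is handed to the
// assembler's load/store wrappers, e.g.
//   auto null_checker = GetImplicitNullChecker(instruction, codegen_);
//   __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
// The wrapper invokes the callable right after the first memory-access instruction it emits, so
// a fault at that pc is attributed to `instruction` without an explicit null test being emitted.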
2527
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002528void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
2529 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002530 Location obj_loc = locations->InAt(0);
2531 Register obj = obj_loc.AsRegister<Register>();
2532 Location out_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002533 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002534 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002535 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002536
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002537 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002538 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2539 instruction->IsStringCharAt();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002540 switch (type) {
2541 case Primitive::kPrimBoolean: {
Alexey Frunze15958152017-02-09 19:08:30 -08002542 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002543 if (index.IsConstant()) {
2544 size_t offset =
2545 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002546 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002547 } else {
2548 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002549 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002550 }
2551 break;
2552 }
2553
2554 case Primitive::kPrimByte: {
Alexey Frunze15958152017-02-09 19:08:30 -08002555 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002556 if (index.IsConstant()) {
2557 size_t offset =
2558 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002559 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002560 } else {
2561 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002562 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002563 }
2564 break;
2565 }
2566
2567 case Primitive::kPrimShort: {
Alexey Frunze15958152017-02-09 19:08:30 -08002568 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002569 if (index.IsConstant()) {
2570 size_t offset =
2571 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002572 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002573 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002574 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002575 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002576 }
2577 break;
2578 }
2579
2580 case Primitive::kPrimChar: {
Alexey Frunze15958152017-02-09 19:08:30 -08002581 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002582 if (maybe_compressed_char_at) {
2583 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2584 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
2585 __ Sll(TMP, TMP, 31); // Extract compression flag into the most significant bit of TMP.
2586 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2587 "Expecting 0=compressed, 1=uncompressed");
2588 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002589 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002590 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2591 if (maybe_compressed_char_at) {
2592 MipsLabel uncompressed_load, done;
2593 __ Bnez(TMP, &uncompressed_load);
2594 __ LoadFromOffset(kLoadUnsignedByte,
2595 out,
2596 obj,
2597 data_offset + (const_index << TIMES_1));
2598 __ B(&done);
2599 __ Bind(&uncompressed_load);
2600 __ LoadFromOffset(kLoadUnsignedHalfword,
2601 out,
2602 obj,
2603 data_offset + (const_index << TIMES_2));
2604 __ Bind(&done);
2605 } else {
2606 __ LoadFromOffset(kLoadUnsignedHalfword,
2607 out,
2608 obj,
2609 data_offset + (const_index << TIMES_2),
2610 null_checker);
2611 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002612 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002613 Register index_reg = index.AsRegister<Register>();
2614 if (maybe_compressed_char_at) {
2615 MipsLabel uncompressed_load, done;
2616 __ Bnez(TMP, &uncompressed_load);
2617 __ Addu(TMP, obj, index_reg);
2618 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2619 __ B(&done);
2620 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002621 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002622 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2623 __ Bind(&done);
2624 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002625 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002626 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
2627 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002628 }
2629 break;
2630 }
2631
Alexey Frunze15958152017-02-09 19:08:30 -08002632 case Primitive::kPrimInt: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002633 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002634 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002635 if (index.IsConstant()) {
2636 size_t offset =
2637 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002638 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002639 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002640 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002641 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002642 }
2643 break;
2644 }
2645
Alexey Frunze15958152017-02-09 19:08:30 -08002646 case Primitive::kPrimNot: {
2647 static_assert(
2648 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2649 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2650 // /* HeapReference<Object> */ out =
2651 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2652 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2653 Location temp = locations->GetTemp(0);
2654 // Note that a potential implicit null check is handled in this
2655 // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
2656 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2657 out_loc,
2658 obj,
2659 data_offset,
2660 index,
2661 temp,
2662 /* needs_null_check */ true);
2663 } else {
2664 Register out = out_loc.AsRegister<Register>();
2665 if (index.IsConstant()) {
2666 size_t offset =
2667 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2668 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
2669 // If read barriers are enabled, emit read barriers other than
2670 // Baker's using a slow path (and also unpoison the loaded
2671 // reference, if heap poisoning is enabled).
2672 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2673 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002674 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08002675 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
2676 // If read barriers are enabled, emit read barriers other than
2677 // Baker's using a slow path (and also unpoison the loaded
2678 // reference, if heap poisoning is enabled).
2679 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2680 out_loc,
2681 out_loc,
2682 obj_loc,
2683 data_offset,
2684 index);
2685 }
2686 }
2687 break;
2688 }
2689
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002690 case Primitive::kPrimLong: {
Alexey Frunze15958152017-02-09 19:08:30 -08002691 Register out = out_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002692 if (index.IsConstant()) {
2693 size_t offset =
2694 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002695 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002696 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002697 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002698 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002699 }
2700 break;
2701 }
2702
2703 case Primitive::kPrimFloat: {
Alexey Frunze15958152017-02-09 19:08:30 -08002704 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002705 if (index.IsConstant()) {
2706 size_t offset =
2707 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002708 __ LoadSFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002709 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002710 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002711 __ LoadSFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002712 }
2713 break;
2714 }
2715
2716 case Primitive::kPrimDouble: {
Alexey Frunze15958152017-02-09 19:08:30 -08002717 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002718 if (index.IsConstant()) {
2719 size_t offset =
2720 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002721 __ LoadDFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002722 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002723 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002724 __ LoadDFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002725 }
2726 break;
2727 }
2728
2729 case Primitive::kPrimVoid:
2730 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2731 UNREACHABLE();
2732 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002733}
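
// Address arithmetic used throughout the visitor above (illustrative): TIMES_1/2/4/8 are the
// log2 scale factors, so with a constant index the whole offset folds into the load immediate,
// while for a register index ShiftAndAdd forms base + (index << scale) and the load applies
// data_offset.
static uint32_t ArrayElementOffsetSketch(uint32_t data_offset, uint32_t index, uint32_t log2_size) {
  return data_offset + (index << log2_size);
}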
2734
2735void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
2736 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2737 locations->SetInAt(0, Location::RequiresRegister());
2738 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2739}
2740
2741void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
2742 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002743 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002744 Register obj = locations->InAt(0).AsRegister<Register>();
2745 Register out = locations->Out().AsRegister<Register>();
2746 __ LoadFromOffset(kLoadWord, out, obj, offset);
2747 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002748 // Mask out compression flag from String's array length.
2749 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2750 __ Srl(out, out, 1u);
2751 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002752}
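
// Illustrative model of the string-compression encoding that the two visitors above rely on,
// as derived from the code (bit 0 of the count field is the flag with 0 meaning compressed, and
// the Srl by 1 in VisitArrayLength recovers the character count); not the runtime's API:
static uint16_t StringCharAtSketch(uint32_t count_field,
                                   const uint8_t* compressed_chars,     // 8-bit character data
                                   const uint16_t* uncompressed_chars,  // 16-bit character data
                                   uint32_t index) {
  // The length (count_field >> 1) is what VisitArrayLength materializes; bounds checking is a
  // separate HBoundsCheck and is omitted here.
  bool compressed = (count_field & 1u) == 0u;  // kCompressed == 0
  return compressed ? compressed_chars[index] : uncompressed_chars[index];
}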
2753
Alexey Frunzef58b2482016-09-02 22:14:06 -07002754Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2755 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2756 ? Location::ConstantLocation(instruction->AsConstant())
2757 : Location::RequiresRegister();
2758}
2759
2760Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2761 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2762 // We can store a non-zero float or double constant without first loading it into the FPU,
2763 // but we should only prefer this if the constant has a single use.
2764 if (instruction->IsConstant() &&
2765 (instruction->AsConstant()->IsZeroBitPattern() ||
2766 instruction->GetUses().HasExactlyOneElement())) {
2767 return Location::ConstantLocation(instruction->AsConstant());
2768 // Otherwise fall through and require an FPU register for the constant.
2769 }
2770 return Location::RequiresFpuRegister();
2771}
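
// Example of the effect (illustrative): storing 1.0f into a float[] where the constant has a
// single use keeps the value as a constant location, so VisitArraySet below emits
// StoreConstToOffset with the 32-bit bit pattern materialized in a GPR (TMP) instead of first
// loading an FPU register; zero-valued constants can be written straight from ZERO.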
2772
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002773void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002774 Primitive::Type value_type = instruction->GetComponentType();
2775
2776 bool needs_write_barrier =
2777 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2778 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2779
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002780 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2781 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002782 may_need_runtime_call_for_type_check ?
2783 LocationSummary::kCallOnSlowPath :
2784 LocationSummary::kNoCall);
2785
2786 locations->SetInAt(0, Location::RequiresRegister());
2787 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2788 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2789 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002790 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002791 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2792 }
2793 if (needs_write_barrier) {
2794 // Temporary register for the write barrier.
2795 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002796 }
2797}
2798
2799void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
2800 LocationSummary* locations = instruction->GetLocations();
2801 Register obj = locations->InAt(0).AsRegister<Register>();
2802 Location index = locations->InAt(1);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002803 Location value_location = locations->InAt(2);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002804 Primitive::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002805 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002806 bool needs_write_barrier =
2807 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002808 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002809 Register base_reg = index.IsConstant() ? obj : TMP;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002810
2811 switch (value_type) {
2812 case Primitive::kPrimBoolean:
2813 case Primitive::kPrimByte: {
2814 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002815 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002816 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002817 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002818 __ Addu(base_reg, obj, index.AsRegister<Register>());
2819 }
2820 if (value_location.IsConstant()) {
2821 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2822 __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
2823 } else {
2824 Register value = value_location.AsRegister<Register>();
2825 __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002826 }
2827 break;
2828 }
2829
2830 case Primitive::kPrimShort:
2831 case Primitive::kPrimChar: {
2832 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002833 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002834 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002835 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002836 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002837 }
2838 if (value_location.IsConstant()) {
2839 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2840 __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
2841 } else {
2842 Register value = value_location.AsRegister<Register>();
2843 __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002844 }
2845 break;
2846 }
2847
Alexey Frunze15958152017-02-09 19:08:30 -08002848 case Primitive::kPrimInt: {
2849 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2850 if (index.IsConstant()) {
2851 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
2852 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002853 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunze15958152017-02-09 19:08:30 -08002854 }
2855 if (value_location.IsConstant()) {
2856 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2857 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2858 } else {
2859 Register value = value_location.AsRegister<Register>();
2860 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2861 }
2862 break;
2863 }
2864
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002865 case Primitive::kPrimNot: {
Alexey Frunze15958152017-02-09 19:08:30 -08002866 if (value_location.IsConstant()) {
2867 // Just setting null.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002868 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002869 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002870 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002871 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002872 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002873 }
Alexey Frunze15958152017-02-09 19:08:30 -08002874 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2875 DCHECK_EQ(value, 0);
2876 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2877 DCHECK(!needs_write_barrier);
2878 DCHECK(!may_need_runtime_call_for_type_check);
2879 break;
2880 }
2881
2882 DCHECK(needs_write_barrier);
2883 Register value = value_location.AsRegister<Register>();
2884 Register temp1 = locations->GetTemp(0).AsRegister<Register>();
2885 Register temp2 = TMP; // Doesn't need to survive slow path.
2886 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2887 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2888 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2889 MipsLabel done;
2890 SlowPathCodeMIPS* slow_path = nullptr;
2891
2892 if (may_need_runtime_call_for_type_check) {
2893 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
2894 codegen_->AddSlowPath(slow_path);
2895 if (instruction->GetValueCanBeNull()) {
2896 MipsLabel non_zero;
2897 __ Bnez(value, &non_zero);
2898 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2899 if (index.IsConstant()) {
2900 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunzec061de12017-02-14 13:27:23 -08002901 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002902 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunzec061de12017-02-14 13:27:23 -08002903 }
Alexey Frunze15958152017-02-09 19:08:30 -08002904 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2905 __ B(&done);
2906 __ Bind(&non_zero);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002907 }
Alexey Frunze15958152017-02-09 19:08:30 -08002908
2909 // Note that when read barriers are enabled, the type checks
2910 // are performed without read barriers. This is fine, even in
2911 // the case where a class object is in the from-space after
2912 // the flip, as a comparison involving such a type would not
2913 // produce a false positive; it may of course produce a false
2914 // negative, in which case we would take the ArraySet slow
2915 // path.
2916
2917 // /* HeapReference<Class> */ temp1 = obj->klass_
2918 __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
2919 __ MaybeUnpoisonHeapReference(temp1);
2920
2921 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2922 __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
2923 // /* HeapReference<Class> */ temp2 = value->klass_
2924 __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
2925 // If heap poisoning is enabled, no need to unpoison `temp1`
2926 // nor `temp2`, as we are comparing two poisoned references.
2927
2928 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2929 MipsLabel do_put;
2930 __ Beq(temp1, temp2, &do_put);
2931 // If heap poisoning is enabled, the `temp1` reference has
2932 // not been unpoisoned yet; unpoison it now.
2933 __ MaybeUnpoisonHeapReference(temp1);
2934
2935 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2936 __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
2937 // If heap poisoning is enabled, no need to unpoison
2938 // `temp1`, as we are comparing against null below.
2939 __ Bnez(temp1, slow_path->GetEntryLabel());
2940 __ Bind(&do_put);
2941 } else {
2942 __ Bne(temp1, temp2, slow_path->GetEntryLabel());
2943 }
2944 }
2945
2946 Register source = value;
2947 if (kPoisonHeapReferences) {
2948 // Note that in the case where `value` is a null reference,
2949 // we do not enter this block, as a null reference does not
2950 // need poisoning.
2951 __ Move(temp1, value);
2952 __ PoisonHeapReference(temp1);
2953 source = temp1;
2954 }
2955
2956 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2957 if (index.IsConstant()) {
2958 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002959 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002960 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunze15958152017-02-09 19:08:30 -08002961 }
2962 __ StoreToOffset(kStoreWord, source, base_reg, data_offset);
2963
2964 if (!may_need_runtime_call_for_type_check) {
2965 codegen_->MaybeRecordImplicitNullCheck(instruction);
2966 }
2967
2968 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
2969
2970 if (done.IsLinked()) {
2971 __ Bind(&done);
2972 }
2973
2974 if (slow_path != nullptr) {
2975 __ Bind(slow_path->GetExitLabel());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002976 }
2977 break;
2978 }
2979
2980 case Primitive::kPrimLong: {
2981 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002982 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002983 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002984 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002985 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002986 }
2987 if (value_location.IsConstant()) {
2988 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2989 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2990 } else {
2991 Register value = value_location.AsRegisterPairLow<Register>();
2992 __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002993 }
2994 break;
2995 }
2996
2997 case Primitive::kPrimFloat: {
2998 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002999 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07003000 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003001 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003002 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07003003 }
3004 if (value_location.IsConstant()) {
3005 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
3006 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
3007 } else {
3008 FRegister value = value_location.AsFpuRegister<FRegister>();
3009 __ StoreSToOffset(value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003010 }
3011 break;
3012 }
3013
3014 case Primitive::kPrimDouble: {
3015 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003016 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07003017 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003018 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003019 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07003020 }
3021 if (value_location.IsConstant()) {
3022 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
3023 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
3024 } else {
3025 FRegister value = value_location.AsFpuRegister<FRegister>();
3026 __ StoreDToOffset(value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003027 }
3028 break;
3029 }
3030
3031 case Primitive::kPrimVoid:
3032 LOG(FATAL) << "Unreachable type " << instruction->GetType();
3033 UNREACHABLE();
3034 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003035}
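
// Fast-path logic of the reference store above, as a sketch with a stand-in type (not the
// runtime's Class). A null value is stored directly with no check or barrier; a false result
// here corresponds to branching to ArraySetSlowPathMIPS:
struct StoreCheckClassStub {
  const StoreCheckClassStub* super_class;
  const StoreCheckClassStub* component_type;
};
static bool ArrayStoreFastPathOkSketch(const StoreCheckClassStub* array_klass,
                                       const StoreCheckClassStub* value_klass,
                                       bool static_type_is_object_array) {
  const StoreCheckClassStub* component = array_klass->component_type;
  if (component == value_klass) {
    return true;  // exact component type match
  }
  // For a statically typed Object[] the store is also accepted when the component type is
  // java.lang.Object, i.e. its super class is null.
  return static_type_is_object_array && component->super_class == nullptr;
}
// After the (possibly poisoned) store, MarkGCCard emits the write barrier for non-null values.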
3036
3037void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003038 RegisterSet caller_saves = RegisterSet::Empty();
3039 InvokeRuntimeCallingConvention calling_convention;
3040 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3041 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3042 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003043 locations->SetInAt(0, Location::RequiresRegister());
3044 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003045}
3046
3047void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
3048 LocationSummary* locations = instruction->GetLocations();
3049 BoundsCheckSlowPathMIPS* slow_path =
3050 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
3051 codegen_->AddSlowPath(slow_path);
3052
3053 Register index = locations->InAt(0).AsRegister<Register>();
3054 Register length = locations->InAt(1).AsRegister<Register>();
3055
3056 // length is limited by the maximum positive signed 32-bit integer.
3057 // Unsigned comparison of length and index checks for index < 0
3058 // and for length <= index simultaneously.
3059 __ Bgeu(index, length, slow_path->GetEntryLabel());
3060}
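
// The single unsigned comparison above covers both bounds tests; an equivalent sketch
// (array lengths are never negative, so the casts are lossless):
static bool IndexOutOfBoundsSketch(int32_t index, int32_t length) {
  // A negative index wraps around to a large unsigned value, so it also compares >= length.
  return static_cast<uint32_t>(index) >= static_cast<uint32_t>(length);
}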
3061
Alexey Frunze15958152017-02-09 19:08:30 -08003062// Temp is used for read barrier.
3063static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3064 if (kEmitCompilerReadBarrier &&
3065 (kUseBakerReadBarrier ||
3066 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3067 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3068 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3069 return 1;
3070 }
3071 return 0;
3072}
3073
3074// Extra temp is used for read barrier.
3075static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3076 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3077}
3078
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003079void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003080 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3081 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3082
3083 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3084 switch (type_check_kind) {
3085 case TypeCheckKind::kExactCheck:
3086 case TypeCheckKind::kAbstractClassCheck:
3087 case TypeCheckKind::kClassHierarchyCheck:
3088 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003089 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003090 ? LocationSummary::kCallOnSlowPath
3091 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3092 break;
3093 case TypeCheckKind::kArrayCheck:
3094 case TypeCheckKind::kUnresolvedCheck:
3095 case TypeCheckKind::kInterfaceCheck:
3096 call_kind = LocationSummary::kCallOnSlowPath;
3097 break;
3098 }
3099
3100 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003101 locations->SetInAt(0, Location::RequiresRegister());
3102 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003103 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003104}
3105
3106void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003107 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003108 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08003109 Location obj_loc = locations->InAt(0);
3110 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003111 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08003112 Location temp_loc = locations->GetTemp(0);
3113 Register temp = temp_loc.AsRegister<Register>();
3114 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3115 DCHECK_LE(num_temps, 2u);
3116 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003117 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3118 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3119 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3120 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3121 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3122 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3123 const uint32_t object_array_data_offset =
3124 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
3125 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003126
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003127 // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
3128 // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
3129 // read barriers is done for performance and code size reasons.
3130 bool is_type_check_slow_path_fatal = false;
3131 if (!kEmitCompilerReadBarrier) {
3132 is_type_check_slow_path_fatal =
3133 (type_check_kind == TypeCheckKind::kExactCheck ||
3134 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3135 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3136 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3137 !instruction->CanThrowIntoCatchBlock();
3138 }
3139 SlowPathCodeMIPS* slow_path =
3140 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
3141 is_type_check_slow_path_fatal);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003142 codegen_->AddSlowPath(slow_path);
3143
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003144 // Avoid this check if we know `obj` is not null.
3145 if (instruction->MustDoNullCheck()) {
3146 __ Beqz(obj, &done);
3147 }
3148
3149 switch (type_check_kind) {
3150 case TypeCheckKind::kExactCheck:
3151 case TypeCheckKind::kArrayCheck: {
3152 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003153 GenerateReferenceLoadTwoRegisters(instruction,
3154 temp_loc,
3155 obj_loc,
3156 class_offset,
3157 maybe_temp2_loc,
3158 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003159 // Jump to slow path for throwing the exception or doing a
3160 // more involved array check.
3161 __ Bne(temp, cls, slow_path->GetEntryLabel());
3162 break;
3163 }
3164
3165 case TypeCheckKind::kAbstractClassCheck: {
3166 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003167 GenerateReferenceLoadTwoRegisters(instruction,
3168 temp_loc,
3169 obj_loc,
3170 class_offset,
3171 maybe_temp2_loc,
3172 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003173 // If the class is abstract, we eagerly fetch the super class of the
3174 // object to avoid doing a comparison we know will fail.
3175 MipsLabel loop;
3176 __ Bind(&loop);
3177 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003178 GenerateReferenceLoadOneRegister(instruction,
3179 temp_loc,
3180 super_offset,
3181 maybe_temp2_loc,
3182 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003183 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3184 // exception.
3185 __ Beqz(temp, slow_path->GetEntryLabel());
3186 // Otherwise, compare the classes.
3187 __ Bne(temp, cls, &loop);
3188 break;
3189 }
3190
3191 case TypeCheckKind::kClassHierarchyCheck: {
3192 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003193 GenerateReferenceLoadTwoRegisters(instruction,
3194 temp_loc,
3195 obj_loc,
3196 class_offset,
3197 maybe_temp2_loc,
3198 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003199 // Walk over the class hierarchy to find a match.
3200 MipsLabel loop;
3201 __ Bind(&loop);
3202 __ Beq(temp, cls, &done);
3203 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003204 GenerateReferenceLoadOneRegister(instruction,
3205 temp_loc,
3206 super_offset,
3207 maybe_temp2_loc,
3208 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003209 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3210 // exception. Otherwise, jump to the beginning of the loop.
3211 __ Bnez(temp, &loop);
3212 __ B(slow_path->GetEntryLabel());
3213 break;
3214 }
3215
3216 case TypeCheckKind::kArrayObjectCheck: {
3217 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003218 GenerateReferenceLoadTwoRegisters(instruction,
3219 temp_loc,
3220 obj_loc,
3221 class_offset,
3222 maybe_temp2_loc,
3223 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003224 // Do an exact check.
3225 __ Beq(temp, cls, &done);
3226 // Otherwise, we need to check that the object's class is a non-primitive array.
3227 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003228 GenerateReferenceLoadOneRegister(instruction,
3229 temp_loc,
3230 component_offset,
3231 maybe_temp2_loc,
3232 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003233 // If the component type is null, jump to the slow path to throw the exception.
3234 __ Beqz(temp, slow_path->GetEntryLabel());
3235 // Otherwise, the object is indeed an array, further check that this component
3236 // type is not a primitive type.
3237 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3238 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3239 __ Bnez(temp, slow_path->GetEntryLabel());
3240 break;
3241 }
3242
3243 case TypeCheckKind::kUnresolvedCheck:
3244 // We always go into the type check slow path for the unresolved check case.
3245 // We cannot directly call the CheckCast runtime entry point
3246 // without resorting to a type checking slow path here (i.e. by
3247 // calling InvokeRuntime directly), as it would require to
3248      // calling InvokeRuntime directly), as it would require assigning
3249      // fixed registers for the inputs of this HCheckCast
3250 // might be cluttered by the potential first read barrier
3251 // emission at the beginning of this method.
3252 __ B(slow_path->GetEntryLabel());
3253 break;
3254
3255 case TypeCheckKind::kInterfaceCheck: {
3256 // Avoid read barriers to improve performance of the fast path. We can not get false
3257 // positives by doing this.
3258 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003259 GenerateReferenceLoadTwoRegisters(instruction,
3260 temp_loc,
3261 obj_loc,
3262 class_offset,
3263 maybe_temp2_loc,
3264 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003265 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003266 GenerateReferenceLoadTwoRegisters(instruction,
3267 temp_loc,
3268 temp_loc,
3269 iftable_offset,
3270 maybe_temp2_loc,
3271 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003272 // Iftable is never null.
3273 __ Lw(TMP, temp, array_length_offset);
3274 // Loop through the iftable and check if any class matches.
3275 MipsLabel loop;
3276 __ Bind(&loop);
3277 __ Addiu(temp, temp, 2 * kHeapReferenceSize); // Possibly in delay slot on R2.
3278 __ Beqz(TMP, slow_path->GetEntryLabel());
3279 __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
3280 __ MaybeUnpoisonHeapReference(AT);
3281 // Go to next interface.
3282 __ Addiu(TMP, TMP, -2);
3283 // Compare the classes and continue the loop if they do not match.
3284 __ Bne(AT, cls, &loop);
3285 break;
3286 }
3287 }
3288
3289 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003290 __ Bind(slow_path->GetExitLabel());
3291}
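
// The hierarchy walks above, re-expressed as a sketch with a stand-in type (ClassStub is not the
// runtime's Class; it only illustrates the control flow of kClassHierarchyCheck, and
// kAbstractClassCheck is the same walk minus the first comparison, since an abstract class can
// never be obj's exact type):
struct ClassStub {
  const ClassStub* super_class;
};
static bool IsInClassHierarchySketch(const ClassStub* obj_klass, const ClassStub* cls) {
  for (const ClassStub* k = obj_klass; k != nullptr; k = k->super_class) {
    if (k == cls) {
      return true;  // cast succeeds
    }
  }
  return false;  // the generated code branches to the slow path (throw or re-check) instead
}
// kInterfaceCheck scans obj->klass_->iftable_ instead: entries come in pairs (interface class,
// method array), so the loop above steps the remaining count down by 2 and compares every even
// slot against cls.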
3292
3293void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3294 LocationSummary* locations =
3295 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3296 locations->SetInAt(0, Location::RequiresRegister());
3297 if (check->HasUses()) {
3298 locations->SetOut(Location::SameAsFirstInput());
3299 }
3300}
3301
3302void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
3303 // We assume the class is not null.
3304 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
3305 check->GetLoadClass(),
3306 check,
3307 check->GetDexPc(),
3308 true);
3309 codegen_->AddSlowPath(slow_path);
3310 GenerateClassInitializationCheck(slow_path,
3311 check->GetLocations()->InAt(0).AsRegister<Register>());
3312}
3313
3314void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
3315 Primitive::Type in_type = compare->InputAt(0)->GetType();
3316
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003317 LocationSummary* locations =
3318 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003319
3320 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003321 case Primitive::kPrimBoolean:
3322 case Primitive::kPrimByte:
3323 case Primitive::kPrimShort:
3324 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003325 case Primitive::kPrimInt:
Alexey Frunzee7697712016-09-15 21:37:49 -07003326 locations->SetInAt(0, Location::RequiresRegister());
3327 locations->SetInAt(1, Location::RequiresRegister());
3328 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3329 break;
3330
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003331 case Primitive::kPrimLong:
3332 locations->SetInAt(0, Location::RequiresRegister());
3333 locations->SetInAt(1, Location::RequiresRegister());
3334 // Output overlaps because it is written before doing the low comparison.
3335 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3336 break;
3337
3338 case Primitive::kPrimFloat:
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003339 case Primitive::kPrimDouble:
3340 locations->SetInAt(0, Location::RequiresFpuRegister());
3341 locations->SetInAt(1, Location::RequiresFpuRegister());
3342 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003343 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003344
3345 default:
3346 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3347 }
3348}
3349
3350void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
3351 LocationSummary* locations = instruction->GetLocations();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003352 Register res = locations->Out().AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003353 Primitive::Type in_type = instruction->InputAt(0)->GetType();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003354 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003355
3356 // 0 if: left == right
3357 // 1 if: left > right
3358 // -1 if: left < right
3359 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003360 case Primitive::kPrimBoolean:
3361 case Primitive::kPrimByte:
3362 case Primitive::kPrimShort:
3363 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003364 case Primitive::kPrimInt: {
3365 Register lhs = locations->InAt(0).AsRegister<Register>();
3366 Register rhs = locations->InAt(1).AsRegister<Register>();
3367 __ Slt(TMP, lhs, rhs);
3368 __ Slt(res, rhs, lhs);
3369 __ Subu(res, res, TMP);
3370 break;
3371 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003372 case Primitive::kPrimLong: {
3373 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003374 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
3375 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
3376 Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
3377 Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
3378 // TODO: more efficient (direct) comparison with a constant.
3379 __ Slt(TMP, lhs_high, rhs_high);
3380 __ Slt(AT, rhs_high, lhs_high); // Inverted: is actually gt.
3381 __ Subu(res, AT, TMP); // Result -1:1:0 for [ <, >, == ].
3382      __ Bnez(res, &done);  // If the high words differ, we are done; otherwise compare the low words.
3383 __ Sltu(TMP, lhs_low, rhs_low);
3384 __ Sltu(AT, rhs_low, lhs_low); // Inverted: is actually gt.
3385 __ Subu(res, AT, TMP); // Result -1:1:0 for [ <, >, == ].
3386 __ Bind(&done);
3387 break;
3388 }
3389
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003390 case Primitive::kPrimFloat: {
Roland Levillain32ca3752016-02-17 16:49:37 +00003391 bool gt_bias = instruction->IsGtBias();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003392 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3393 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3394 MipsLabel done;
3395 if (isR6) {
3396 __ CmpEqS(FTMP, lhs, rhs);
3397 __ LoadConst32(res, 0);
3398 __ Bc1nez(FTMP, &done);
3399 if (gt_bias) {
3400 __ CmpLtS(FTMP, lhs, rhs);
3401 __ LoadConst32(res, -1);
3402 __ Bc1nez(FTMP, &done);
3403 __ LoadConst32(res, 1);
3404 } else {
3405 __ CmpLtS(FTMP, rhs, lhs);
3406 __ LoadConst32(res, 1);
3407 __ Bc1nez(FTMP, &done);
3408 __ LoadConst32(res, -1);
3409 }
3410 } else {
3411 if (gt_bias) {
3412 __ ColtS(0, lhs, rhs);
3413 __ LoadConst32(res, -1);
3414 __ Bc1t(0, &done);
3415 __ CeqS(0, lhs, rhs);
3416 __ LoadConst32(res, 1);
3417 __ Movt(res, ZERO, 0);
3418 } else {
3419 __ ColtS(0, rhs, lhs);
3420 __ LoadConst32(res, 1);
3421 __ Bc1t(0, &done);
3422 __ CeqS(0, lhs, rhs);
3423 __ LoadConst32(res, -1);
3424 __ Movt(res, ZERO, 0);
3425 }
3426 }
3427 __ Bind(&done);
3428 break;
3429 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003430 case Primitive::kPrimDouble: {
Roland Levillain32ca3752016-02-17 16:49:37 +00003431 bool gt_bias = instruction->IsGtBias();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003432 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3433 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3434 MipsLabel done;
3435 if (isR6) {
3436 __ CmpEqD(FTMP, lhs, rhs);
3437 __ LoadConst32(res, 0);
3438 __ Bc1nez(FTMP, &done);
3439 if (gt_bias) {
3440 __ CmpLtD(FTMP, lhs, rhs);
3441 __ LoadConst32(res, -1);
3442 __ Bc1nez(FTMP, &done);
3443 __ LoadConst32(res, 1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003444 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003445 __ CmpLtD(FTMP, rhs, lhs);
3446 __ LoadConst32(res, 1);
3447 __ Bc1nez(FTMP, &done);
3448 __ LoadConst32(res, -1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003449 }
3450 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003451 if (gt_bias) {
3452 __ ColtD(0, lhs, rhs);
3453 __ LoadConst32(res, -1);
3454 __ Bc1t(0, &done);
3455 __ CeqD(0, lhs, rhs);
3456 __ LoadConst32(res, 1);
3457 __ Movt(res, ZERO, 0);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003458 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003459 __ ColtD(0, rhs, lhs);
3460 __ LoadConst32(res, 1);
3461 __ Bc1t(0, &done);
3462 __ CeqD(0, lhs, rhs);
3463 __ LoadConst32(res, -1);
3464 __ Movt(res, ZERO, 0);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003465 }
3466 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003467 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003468 break;
3469 }
3470
3471 default:
3472 LOG(FATAL) << "Unimplemented compare type " << in_type;
3473 }
3474}
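
// Branch-free form of the integer paths above (sketch). For float/double, when either input is
// NaN all ordered compares fail, so the result falls out as 1 with gt_bias and -1 without it.
static int32_t Compare32Sketch(int32_t lhs, int32_t rhs) {
  // Mirrors the Slt/Slt/Subu sequence: (rhs < lhs) - (lhs < rhs) is -1, 0 or 1.
  return static_cast<int32_t>(rhs < lhs) - static_cast<int32_t>(lhs < rhs);
}
static int32_t Compare64Sketch(int32_t lhs_high, uint32_t lhs_low,
                               int32_t rhs_high, uint32_t rhs_low) {
  // The high words decide (signed compare); equal high words fall back to an unsigned compare
  // of the low words, mirroring the Slt/Sltu pair.
  int32_t res =
      static_cast<int32_t>(rhs_high < lhs_high) - static_cast<int32_t>(lhs_high < rhs_high);
  if (res == 0) {
    res = static_cast<int32_t>(rhs_low < lhs_low) - static_cast<int32_t>(lhs_low < rhs_low);
  }
  return res;
}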
3475
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003476void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003477 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003478 switch (instruction->InputAt(0)->GetType()) {
3479 default:
3480 case Primitive::kPrimLong:
3481 locations->SetInAt(0, Location::RequiresRegister());
3482 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3483 break;
3484
3485 case Primitive::kPrimFloat:
3486 case Primitive::kPrimDouble:
3487 locations->SetInAt(0, Location::RequiresFpuRegister());
3488 locations->SetInAt(1, Location::RequiresFpuRegister());
3489 break;
3490 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003491 if (!instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003492 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3493 }
3494}
3495
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003496void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003497 if (instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003498 return;
3499 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003500
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003501 Primitive::Type type = instruction->InputAt(0)->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003502 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003503
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003504 switch (type) {
3505 default:
3506 // Integer case.
3507 GenerateIntCompare(instruction->GetCondition(), locations);
3508 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003509
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003510 case Primitive::kPrimLong:
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01003511 GenerateLongCompare(instruction->GetCondition(), locations);
3512 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003513
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003514 case Primitive::kPrimFloat:
3515 case Primitive::kPrimDouble:
Alexey Frunze2ddb7172016-09-06 17:04:55 -07003516 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3517 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003518 }
3519}
3520
Alexey Frunze7e99e052015-11-24 19:28:01 -08003521void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3522 DCHECK(instruction->IsDiv() || instruction->IsRem());
3523 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3524
3525 LocationSummary* locations = instruction->GetLocations();
3526 Location second = locations->InAt(1);
3527 DCHECK(second.IsConstant());
3528
3529 Register out = locations->Out().AsRegister<Register>();
3530 Register dividend = locations->InAt(0).AsRegister<Register>();
3531 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3532 DCHECK(imm == 1 || imm == -1);
3533
3534 if (instruction->IsRem()) {
3535 __ Move(out, ZERO);
3536 } else {
3537 if (imm == -1) {
3538 __ Subu(out, ZERO, dividend);
3539 } else if (out != dividend) {
3540 __ Move(out, dividend);
3541 }
3542 }
3543}
3544
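// The sequence emitted below computes trunc(dividend / 2^k) without a
// divide: negative dividends are biased by (2^k - 1) before the arithmetic
// shift so the shift rounds toward zero instead of toward -infinity, and
// the quotient is negated when the divisor is negative. A sketch of the
// emitted code, assuming a divisor of +4 (k = 2):
//
//   sra  TMP, dividend, 31   # TMP = 0 or 0xffffffff
//   srl  TMP, TMP, 30        # TMP = 0 or 3 (the bias)
//   addu out, dividend, TMP
//   sra  out, out, 2         # e.g. -7 becomes (-7 + 3) >> 2 = -1, not -2
//
// The remainder path applies the same bias, masks the low k bits, and then
// subtracts the bias again, so that e.g. -7 % 4 yields -3 as Java requires.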
3545void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
3546 DCHECK(instruction->IsDiv() || instruction->IsRem());
3547 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3548
3549 LocationSummary* locations = instruction->GetLocations();
3550 Location second = locations->InAt(1);
3551 DCHECK(second.IsConstant());
3552
3553 Register out = locations->Out().AsRegister<Register>();
3554 Register dividend = locations->InAt(0).AsRegister<Register>();
3555 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003556 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Alexey Frunze7e99e052015-11-24 19:28:01 -08003557 int ctz_imm = CTZ(abs_imm);
3558
3559 if (instruction->IsDiv()) {
3560 if (ctz_imm == 1) {
3561 // Fast path for division by +/-2, which is very common.
3562 __ Srl(TMP, dividend, 31);
3563 } else {
3564 __ Sra(TMP, dividend, 31);
3565 __ Srl(TMP, TMP, 32 - ctz_imm);
3566 }
3567 __ Addu(out, dividend, TMP);
3568 __ Sra(out, out, ctz_imm);
3569 if (imm < 0) {
3570 __ Subu(out, ZERO, out);
3571 }
3572 } else {
3573 if (ctz_imm == 1) {
3574 // Fast path for modulo +/-2, which is very common.
3575 __ Sra(TMP, dividend, 31);
3576 __ Subu(out, dividend, TMP);
3577 __ Andi(out, out, 1);
3578 __ Addu(out, out, TMP);
3579 } else {
3580 __ Sra(TMP, dividend, 31);
3581 __ Srl(TMP, TMP, 32 - ctz_imm);
3582 __ Addu(out, dividend, TMP);
3583 if (IsUint<16>(abs_imm - 1)) {
3584 __ Andi(out, out, abs_imm - 1);
3585 } else {
3586 __ Sll(out, out, 32 - ctz_imm);
3587 __ Srl(out, out, 32 - ctz_imm);
3588 }
3589 __ Subu(out, out, TMP);
3590 }
3591 }
3592}
3593
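// For the remaining constant divisors the code below uses reciprocal
// ("magic number") multiplication: the dividend is multiplied by a
// precomputed 32-bit constant (CalculateMagicAndShiftForDivRem), only the
// high 32 bits of the product are kept (MuhR6 on R6, Mult + Mfhi on R2),
// the dividend is added or subtracted when the signs of the magic constant
// and the divisor disagree, the result is shifted right arithmetically,
// and 1 is added for negative quotients so the division truncates toward
// zero. For Rem, the quotient is multiplied back by the divisor and
// subtracted from the dividend.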
3594void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3595 DCHECK(instruction->IsDiv() || instruction->IsRem());
3596 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3597
3598 LocationSummary* locations = instruction->GetLocations();
3599 Location second = locations->InAt(1);
3600 DCHECK(second.IsConstant());
3601
3602 Register out = locations->Out().AsRegister<Register>();
3603 Register dividend = locations->InAt(0).AsRegister<Register>();
3604 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3605
3606 int64_t magic;
3607 int shift;
3608 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3609
3610 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3611
3612 __ LoadConst32(TMP, magic);
3613 if (isR6) {
3614 __ MuhR6(TMP, dividend, TMP);
3615 } else {
3616 __ MultR2(dividend, TMP);
3617 __ Mfhi(TMP);
3618 }
3619 if (imm > 0 && magic < 0) {
3620 __ Addu(TMP, TMP, dividend);
3621 } else if (imm < 0 && magic > 0) {
3622 __ Subu(TMP, TMP, dividend);
3623 }
3624
3625 if (shift != 0) {
3626 __ Sra(TMP, TMP, shift);
3627 }
3628
3629 if (instruction->IsDiv()) {
3630 __ Sra(out, TMP, 31);
3631 __ Subu(out, TMP, out);
3632 } else {
3633 __ Sra(AT, TMP, 31);
3634 __ Subu(AT, TMP, AT);
3635 __ LoadConst32(TMP, imm);
3636 if (isR6) {
3637 __ MulR6(TMP, AT, TMP);
3638 } else {
3639 __ MulR2(TMP, AT, TMP);
3640 }
3641 __ Subu(out, dividend, TMP);
3642 }
3643}
3644
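// Dispatches Int32 div/rem on the shape of the divisor: +/-1 and powers of
// two use the shift-based helpers above, any other constant uses the magic
// multiplication, and a register divisor falls back to the hardware divide
// (DivR6/ModR6 on R6, DivR2/ModR2 on R2). A constant divisor of zero emits
// nothing, since DivZeroCheck guarantees that path is never executed.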
3645void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3646 DCHECK(instruction->IsDiv() || instruction->IsRem());
3647 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3648
3649 LocationSummary* locations = instruction->GetLocations();
3650 Register out = locations->Out().AsRegister<Register>();
3651 Location second = locations->InAt(1);
3652
3653 if (second.IsConstant()) {
3654 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3655 if (imm == 0) {
3656 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3657 } else if (imm == 1 || imm == -1) {
3658 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003659 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003660 DivRemByPowerOfTwo(instruction);
3661 } else {
3662 DCHECK(imm <= -2 || imm >= 2);
3663 GenerateDivRemWithAnyConstant(instruction);
3664 }
3665 } else {
3666 Register dividend = locations->InAt(0).AsRegister<Register>();
3667 Register divisor = second.AsRegister<Register>();
3668 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3669 if (instruction->IsDiv()) {
3670 if (isR6) {
3671 __ DivR6(out, dividend, divisor);
3672 } else {
3673 __ DivR2(out, dividend, divisor);
3674 }
3675 } else {
3676 if (isR6) {
3677 __ ModR6(out, dividend, divisor);
3678 } else {
3679 __ ModR2(out, dividend, divisor);
3680 }
3681 }
3682 }
3683}
3684
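// Locations for HDiv: Int32 keeps its divisor as a register or constant so
// the constant strategies above remain applicable, Int64 goes through the
// kQuickLdiv runtime entrypoint and therefore pins its inputs and output to
// the runtime calling convention registers, and the floating-point cases
// operate directly on FPU registers.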
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003685void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
3686 Primitive::Type type = div->GetResultType();
3687 LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003688 ? LocationSummary::kCallOnMainOnly
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003689 : LocationSummary::kNoCall;
3690
3691 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3692
3693 switch (type) {
3694 case Primitive::kPrimInt:
3695 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08003696 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003697 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3698 break;
3699
3700 case Primitive::kPrimLong: {
3701 InvokeRuntimeCallingConvention calling_convention;
3702 locations->SetInAt(0, Location::RegisterPairLocation(
3703 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3704 locations->SetInAt(1, Location::RegisterPairLocation(
3705 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3706 locations->SetOut(calling_convention.GetReturnLocation(type));
3707 break;
3708 }
3709
3710 case Primitive::kPrimFloat:
3711 case Primitive::kPrimDouble:
3712 locations->SetInAt(0, Location::RequiresFpuRegister());
3713 locations->SetInAt(1, Location::RequiresFpuRegister());
3714 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3715 break;
3716
3717 default:
3718 LOG(FATAL) << "Unexpected div type " << type;
3719 }
3720}
3721
3722void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
3723 Primitive::Type type = instruction->GetType();
3724 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003725
3726 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003727 case Primitive::kPrimInt:
3728 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003729 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003730 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01003731 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003732 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
3733 break;
3734 }
3735 case Primitive::kPrimFloat:
3736 case Primitive::kPrimDouble: {
3737 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
3738 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3739 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3740 if (type == Primitive::kPrimFloat) {
3741 __ DivS(dst, lhs, rhs);
3742 } else {
3743 __ DivD(dst, lhs, rhs);
3744 }
3745 break;
3746 }
3747 default:
3748 LOG(FATAL) << "Unexpected div type " << type;
3749 }
3750}
3751
3752void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003753 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003754 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003755}
3756
3757void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3758 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
3759 codegen_->AddSlowPath(slow_path);
3760 Location value = instruction->GetLocations()->InAt(0);
3761 Primitive::Type type = instruction->GetType();
3762
3763 switch (type) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003764 case Primitive::kPrimBoolean:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003765 case Primitive::kPrimByte:
3766 case Primitive::kPrimChar:
3767 case Primitive::kPrimShort:
3768 case Primitive::kPrimInt: {
3769 if (value.IsConstant()) {
3770 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3771 __ B(slow_path->GetEntryLabel());
3772 } else {
3773 // A division by a non-zero constant is valid. We don't need to perform
3774 // any check, so simply fall through.
3775 }
3776 } else {
3777 DCHECK(value.IsRegister()) << value;
3778 __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
3779 }
3780 break;
3781 }
3782 case Primitive::kPrimLong: {
3783 if (value.IsConstant()) {
3784 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3785 __ B(slow_path->GetEntryLabel());
3786 } else {
3787 // A division by a non-zero constant is valid. We don't need to perform
3788 // any check, so simply fall through.
3789 }
3790 } else {
3791 DCHECK(value.IsRegisterPair()) << value;
3792 __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
3793 __ Beqz(TMP, slow_path->GetEntryLabel());
3794 }
3795 break;
3796 }
3797 default:
3798 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
3799 }
3800}
3801
3802void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
3803 LocationSummary* locations =
3804 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3805 locations->SetOut(Location::ConstantLocation(constant));
3806}
3807
3808void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
3809 // Will be generated at use site.
3810}
3811
3812void LocationsBuilderMIPS::VisitExit(HExit* exit) {
3813 exit->SetLocations(nullptr);
3814}
3815
3816void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
3817}
3818
3819void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
3820 LocationSummary* locations =
3821 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3822 locations->SetOut(Location::ConstantLocation(constant));
3823}
3824
3825void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
3826 // Will be generated at use site.
3827}
3828
3829void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
3830 got->SetLocations(nullptr);
3831}
3832
3833void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
3834 DCHECK(!successor->IsExitBlock());
3835 HBasicBlock* block = got->GetBlock();
3836 HInstruction* previous = got->GetPrevious();
3837 HLoopInformation* info = block->GetLoopInformation();
3838
3839 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3840 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3841 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3842 return;
3843 }
3844 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3845 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3846 }
3847 if (!codegen_->GoesToNextBlock(block, successor)) {
3848 __ B(codegen_->GetLabelOf(successor));
3849 }
3850}
3851
3852void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
3853 HandleGoto(got, got->GetSuccessor());
3854}
3855
3856void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3857 try_boundary->SetLocations(nullptr);
3858}
3859
3860void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3861 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3862 if (!successor->IsExitBlock()) {
3863 HandleGoto(try_boundary, successor);
3864 }
3865}
3866
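// Materializes a 32-bit comparison as 0/1 in `dst`. MIPS only has
// "set on less than" (Slt/Sltu and their immediate forms), so the other
// relations are derived from it: equality is reduced to testing an
// Xor/Addiu result against zero, <= with an immediate becomes < (rhs + 1),
// and the remaining forms either swap the operands or compute the opposite
// relation and invert it with Xori dst, dst, 1.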
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003867void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
3868 LocationSummary* locations) {
3869 Register dst = locations->Out().AsRegister<Register>();
3870 Register lhs = locations->InAt(0).AsRegister<Register>();
3871 Location rhs_location = locations->InAt(1);
3872 Register rhs_reg = ZERO;
3873 int64_t rhs_imm = 0;
3874 bool use_imm = rhs_location.IsConstant();
3875 if (use_imm) {
3876 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3877 } else {
3878 rhs_reg = rhs_location.AsRegister<Register>();
3879 }
3880
3881 switch (cond) {
3882 case kCondEQ:
3883 case kCondNE:
Alexey Frunzee7697712016-09-15 21:37:49 -07003884 if (use_imm && IsInt<16>(-rhs_imm)) {
3885 if (rhs_imm == 0) {
3886 if (cond == kCondEQ) {
3887 __ Sltiu(dst, lhs, 1);
3888 } else {
3889 __ Sltu(dst, ZERO, lhs);
3890 }
3891 } else {
3892 __ Addiu(dst, lhs, -rhs_imm);
3893 if (cond == kCondEQ) {
3894 __ Sltiu(dst, dst, 1);
3895 } else {
3896 __ Sltu(dst, ZERO, dst);
3897 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003898 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003899 } else {
Alexey Frunzee7697712016-09-15 21:37:49 -07003900 if (use_imm && IsUint<16>(rhs_imm)) {
3901 __ Xori(dst, lhs, rhs_imm);
3902 } else {
3903 if (use_imm) {
3904 rhs_reg = TMP;
3905 __ LoadConst32(rhs_reg, rhs_imm);
3906 }
3907 __ Xor(dst, lhs, rhs_reg);
3908 }
3909 if (cond == kCondEQ) {
3910 __ Sltiu(dst, dst, 1);
3911 } else {
3912 __ Sltu(dst, ZERO, dst);
3913 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003914 }
3915 break;
3916
3917 case kCondLT:
3918 case kCondGE:
3919 if (use_imm && IsInt<16>(rhs_imm)) {
3920 __ Slti(dst, lhs, rhs_imm);
3921 } else {
3922 if (use_imm) {
3923 rhs_reg = TMP;
3924 __ LoadConst32(rhs_reg, rhs_imm);
3925 }
3926 __ Slt(dst, lhs, rhs_reg);
3927 }
3928 if (cond == kCondGE) {
3929 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3930 // only the slt instruction but no sge.
3931 __ Xori(dst, dst, 1);
3932 }
3933 break;
3934
3935 case kCondLE:
3936 case kCondGT:
3937 if (use_imm && IsInt<16>(rhs_imm + 1)) {
3938 // Simulate lhs <= rhs via lhs < rhs + 1.
3939 __ Slti(dst, lhs, rhs_imm + 1);
3940 if (cond == kCondGT) {
3941 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3942 // only the slti instruction but no sgti.
3943 __ Xori(dst, dst, 1);
3944 }
3945 } else {
3946 if (use_imm) {
3947 rhs_reg = TMP;
3948 __ LoadConst32(rhs_reg, rhs_imm);
3949 }
3950 __ Slt(dst, rhs_reg, lhs);
3951 if (cond == kCondLE) {
3952 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3953 // only the slt instruction but no sle.
3954 __ Xori(dst, dst, 1);
3955 }
3956 }
3957 break;
3958
3959 case kCondB:
3960 case kCondAE:
3961 if (use_imm && IsInt<16>(rhs_imm)) {
3962 // Sltiu sign-extends its 16-bit immediate operand before
3963 // the comparison and thus lets us compare directly with
3964 // unsigned values in the ranges [0, 0x7fff] and
3965 // [0xffff8000, 0xffffffff].
3966 __ Sltiu(dst, lhs, rhs_imm);
3967 } else {
3968 if (use_imm) {
3969 rhs_reg = TMP;
3970 __ LoadConst32(rhs_reg, rhs_imm);
3971 }
3972 __ Sltu(dst, lhs, rhs_reg);
3973 }
3974 if (cond == kCondAE) {
3975 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3976 // only the sltu instruction but no sgeu.
3977 __ Xori(dst, dst, 1);
3978 }
3979 break;
3980
3981 case kCondBE:
3982 case kCondA:
3983 if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
3984 // Simulate lhs <= rhs via lhs < rhs + 1.
3985 // Note that this only works if rhs + 1 does not overflow
3986 // to 0, hence the check above.
3987 // Sltiu sign-extends its 16-bit immediate operand before
3988 // the comparison and thus lets us compare directly with
3989 // unsigned values in the ranges [0, 0x7fff] and
3990 // [0xffff8000, 0xffffffff].
3991 __ Sltiu(dst, lhs, rhs_imm + 1);
3992 if (cond == kCondA) {
3993 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3994 // only the sltiu instruction but no sgtiu.
3995 __ Xori(dst, dst, 1);
3996 }
3997 } else {
3998 if (use_imm) {
3999 rhs_reg = TMP;
4000 __ LoadConst32(rhs_reg, rhs_imm);
4001 }
4002 __ Sltu(dst, rhs_reg, lhs);
4003 if (cond == kCondBE) {
4004 // Simulate lhs <= rhs via !(rhs < lhs) since there's
4005 // only the sltu instruction but no sleu.
4006 __ Xori(dst, dst, 1);
4007 }
4008 }
4009 break;
4010 }
4011}
4012
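// Like GenerateIntCompare, but skips the final normalization to 0/1: it
// leaves a value in `dst` whose zero/non-zero state encodes the condition
// and returns whether that encoding is inverted. A false return means the
// condition holds when `dst` is non-zero; a true return means it holds when
// `dst` is zero, so the consumer must test or select the opposite way.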
Alexey Frunze674b9ee2016-09-20 14:54:15 -07004013bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
4014 LocationSummary* input_locations,
4015 Register dst) {
4016 Register lhs = input_locations->InAt(0).AsRegister<Register>();
4017 Location rhs_location = input_locations->InAt(1);
4018 Register rhs_reg = ZERO;
4019 int64_t rhs_imm = 0;
4020 bool use_imm = rhs_location.IsConstant();
4021 if (use_imm) {
4022 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
4023 } else {
4024 rhs_reg = rhs_location.AsRegister<Register>();
4025 }
4026
4027 switch (cond) {
4028 case kCondEQ:
4029 case kCondNE:
4030 if (use_imm && IsInt<16>(-rhs_imm)) {
4031 __ Addiu(dst, lhs, -rhs_imm);
4032 } else if (use_imm && IsUint<16>(rhs_imm)) {
4033 __ Xori(dst, lhs, rhs_imm);
4034 } else {
4035 if (use_imm) {
4036 rhs_reg = TMP;
4037 __ LoadConst32(rhs_reg, rhs_imm);
4038 }
4039 __ Xor(dst, lhs, rhs_reg);
4040 }
4041 return (cond == kCondEQ);
4042
4043 case kCondLT:
4044 case kCondGE:
4045 if (use_imm && IsInt<16>(rhs_imm)) {
4046 __ Slti(dst, lhs, rhs_imm);
4047 } else {
4048 if (use_imm) {
4049 rhs_reg = TMP;
4050 __ LoadConst32(rhs_reg, rhs_imm);
4051 }
4052 __ Slt(dst, lhs, rhs_reg);
4053 }
4054 return (cond == kCondGE);
4055
4056 case kCondLE:
4057 case kCondGT:
4058 if (use_imm && IsInt<16>(rhs_imm + 1)) {
4059 // Simulate lhs <= rhs via lhs < rhs + 1.
4060 __ Slti(dst, lhs, rhs_imm + 1);
4061 return (cond == kCondGT);
4062 } else {
4063 if (use_imm) {
4064 rhs_reg = TMP;
4065 __ LoadConst32(rhs_reg, rhs_imm);
4066 }
4067 __ Slt(dst, rhs_reg, lhs);
4068 return (cond == kCondLE);
4069 }
4070
4071 case kCondB:
4072 case kCondAE:
4073 if (use_imm && IsInt<16>(rhs_imm)) {
4074 // Sltiu sign-extends its 16-bit immediate operand before
4075 // the comparison and thus lets us compare directly with
4076 // unsigned values in the ranges [0, 0x7fff] and
4077 // [0xffff8000, 0xffffffff].
4078 __ Sltiu(dst, lhs, rhs_imm);
4079 } else {
4080 if (use_imm) {
4081 rhs_reg = TMP;
4082 __ LoadConst32(rhs_reg, rhs_imm);
4083 }
4084 __ Sltu(dst, lhs, rhs_reg);
4085 }
4086 return (cond == kCondAE);
4087
4088 case kCondBE:
4089 case kCondA:
4090 if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4091 // Simulate lhs <= rhs via lhs < rhs + 1.
4092 // Note that this only works if rhs + 1 does not overflow
4093 // to 0, hence the check above.
4094 // Sltiu sign-extends its 16-bit immediate operand before
4095 // the comparison and thus lets us compare directly with
4096 // unsigned values in the ranges [0, 0x7fff] and
4097 // [0xffff8000, 0xffffffff].
4098 __ Sltiu(dst, lhs, rhs_imm + 1);
4099 return (cond == kCondA);
4100 } else {
4101 if (use_imm) {
4102 rhs_reg = TMP;
4103 __ LoadConst32(rhs_reg, rhs_imm);
4104 }
4105 __ Sltu(dst, rhs_reg, lhs);
4106 return (cond == kCondBE);
4107 }
4108 }
4109}
4110
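// Branching counterpart of GenerateIntCompare. Comparisons against zero use
// the Beqz/Bnez/Bltz/Bgez/Blez/Bgtz family, register operands (and any
// operand on R6) go through the assembler's Blt/Bge/Bltu/Bgeu-style
// branches, and on R2 a constant that fits Slti/Sltiu is first reduced to a
// flag in TMP so only a branch on zero/non-zero remains.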
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08004111void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
4112 LocationSummary* locations,
4113 MipsLabel* label) {
4114 Register lhs = locations->InAt(0).AsRegister<Register>();
4115 Location rhs_location = locations->InAt(1);
4116 Register rhs_reg = ZERO;
Alexey Frunzee7697712016-09-15 21:37:49 -07004117 int64_t rhs_imm = 0;
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08004118 bool use_imm = rhs_location.IsConstant();
4119 if (use_imm) {
4120 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
4121 } else {
4122 rhs_reg = rhs_location.AsRegister<Register>();
4123 }
4124
4125 if (use_imm && rhs_imm == 0) {
4126 switch (cond) {
4127 case kCondEQ:
4128 case kCondBE: // <= 0 if zero
4129 __ Beqz(lhs, label);
4130 break;
4131 case kCondNE:
4132 case kCondA: // > 0 if non-zero
4133 __ Bnez(lhs, label);
4134 break;
4135 case kCondLT:
4136 __ Bltz(lhs, label);
4137 break;
4138 case kCondGE:
4139 __ Bgez(lhs, label);
4140 break;
4141 case kCondLE:
4142 __ Blez(lhs, label);
4143 break;
4144 case kCondGT:
4145 __ Bgtz(lhs, label);
4146 break;
4147 case kCondB: // always false
4148 break;
4149 case kCondAE: // always true
4150 __ B(label);
4151 break;
4152 }
4153 } else {
Alexey Frunzee7697712016-09-15 21:37:49 -07004154 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
4155 if (isR6 || !use_imm) {
4156 if (use_imm) {
4157 rhs_reg = TMP;
4158 __ LoadConst32(rhs_reg, rhs_imm);
4159 }
4160 switch (cond) {
4161 case kCondEQ:
4162 __ Beq(lhs, rhs_reg, label);
4163 break;
4164 case kCondNE:
4165 __ Bne(lhs, rhs_reg, label);
4166 break;
4167 case kCondLT:
4168 __ Blt(lhs, rhs_reg, label);
4169 break;
4170 case kCondGE:
4171 __ Bge(lhs, rhs_reg, label);
4172 break;
4173 case kCondLE:
4174 __ Bge(rhs_reg, lhs, label);
4175 break;
4176 case kCondGT:
4177 __ Blt(rhs_reg, lhs, label);
4178 break;
4179 case kCondB:
4180 __ Bltu(lhs, rhs_reg, label);
4181 break;
4182 case kCondAE:
4183 __ Bgeu(lhs, rhs_reg, label);
4184 break;
4185 case kCondBE:
4186 __ Bgeu(rhs_reg, lhs, label);
4187 break;
4188 case kCondA:
4189 __ Bltu(rhs_reg, lhs, label);
4190 break;
4191 }
4192 } else {
4193 // Special cases for more efficient comparison with constants on R2.
4194 switch (cond) {
4195 case kCondEQ:
4196 __ LoadConst32(TMP, rhs_imm);
4197 __ Beq(lhs, TMP, label);
4198 break;
4199 case kCondNE:
4200 __ LoadConst32(TMP, rhs_imm);
4201 __ Bne(lhs, TMP, label);
4202 break;
4203 case kCondLT:
4204 if (IsInt<16>(rhs_imm)) {
4205 __ Slti(TMP, lhs, rhs_imm);
4206 __ Bnez(TMP, label);
4207 } else {
4208 __ LoadConst32(TMP, rhs_imm);
4209 __ Blt(lhs, TMP, label);
4210 }
4211 break;
4212 case kCondGE:
4213 if (IsInt<16>(rhs_imm)) {
4214 __ Slti(TMP, lhs, rhs_imm);
4215 __ Beqz(TMP, label);
4216 } else {
4217 __ LoadConst32(TMP, rhs_imm);
4218 __ Bge(lhs, TMP, label);
4219 }
4220 break;
4221 case kCondLE:
4222 if (IsInt<16>(rhs_imm + 1)) {
4223 // Simulate lhs <= rhs via lhs < rhs + 1.
4224 __ Slti(TMP, lhs, rhs_imm + 1);
4225 __ Bnez(TMP, label);
4226 } else {
4227 __ LoadConst32(TMP, rhs_imm);
4228 __ Bge(TMP, lhs, label);
4229 }
4230 break;
4231 case kCondGT:
4232 if (IsInt<16>(rhs_imm + 1)) {
4233 // Simulate lhs > rhs via !(lhs < rhs + 1).
4234 __ Slti(TMP, lhs, rhs_imm + 1);
4235 __ Beqz(TMP, label);
4236 } else {
4237 __ LoadConst32(TMP, rhs_imm);
4238 __ Blt(TMP, lhs, label);
4239 }
4240 break;
4241 case kCondB:
4242 if (IsInt<16>(rhs_imm)) {
4243 __ Sltiu(TMP, lhs, rhs_imm);
4244 __ Bnez(TMP, label);
4245 } else {
4246 __ LoadConst32(TMP, rhs_imm);
4247 __ Bltu(lhs, TMP, label);
4248 }
4249 break;
4250 case kCondAE:
4251 if (IsInt<16>(rhs_imm)) {
4252 __ Sltiu(TMP, lhs, rhs_imm);
4253 __ Beqz(TMP, label);
4254 } else {
4255 __ LoadConst32(TMP, rhs_imm);
4256 __ Bgeu(lhs, TMP, label);
4257 }
4258 break;
4259 case kCondBE:
4260 if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4261 // Simulate lhs <= rhs via lhs < rhs + 1.
4262 // Note that this only works if rhs + 1 does not overflow
4263 // to 0, hence the check above.
4264 __ Sltiu(TMP, lhs, rhs_imm + 1);
4265 __ Bnez(TMP, label);
4266 } else {
4267 __ LoadConst32(TMP, rhs_imm);
4268 __ Bgeu(TMP, lhs, label);
4269 }
4270 break;
4271 case kCondA:
4272 if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4273 // Simulate lhs > rhs via !(lhs < rhs + 1).
4274 // Note that this only works if rhs + 1 does not overflow
4275 // to 0, hence the check above.
4276 __ Sltiu(TMP, lhs, rhs_imm + 1);
4277 __ Beqz(TMP, label);
4278 } else {
4279 __ LoadConst32(TMP, rhs_imm);
4280 __ Bltu(TMP, lhs, label);
4281 }
4282 break;
4283 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08004284 }
4285 }
4286}
4287
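// Materializes a 64-bit comparison using only 32-bit operations on the
// register pair: equality Xors both halves and tests the combined result
// against zero, while the ordering tests compare the high words (signed for
// the LT/GE/GT/LE forms, unsigned for the B/A forms) and let the unsigned
// comparison of the low words decide only when the high words are equal.
// As elsewhere, each "opposite" condition reuses the base relation and
// inverts it with Xori dst, dst, 1.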
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01004288void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
4289 LocationSummary* locations) {
4290 Register dst = locations->Out().AsRegister<Register>();
4291 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
4292 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
4293 Location rhs_location = locations->InAt(1);
4294 Register rhs_high = ZERO;
4295 Register rhs_low = ZERO;
4296 int64_t imm = 0;
4297 uint32_t imm_high = 0;
4298 uint32_t imm_low = 0;
4299 bool use_imm = rhs_location.IsConstant();
4300 if (use_imm) {
4301 imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
4302 imm_high = High32Bits(imm);
4303 imm_low = Low32Bits(imm);
4304 } else {
4305 rhs_high = rhs_location.AsRegisterPairHigh<Register>();
4306 rhs_low = rhs_location.AsRegisterPairLow<Register>();
4307 }
4308 if (use_imm && imm == 0) {
4309 switch (cond) {
4310 case kCondEQ:
4311 case kCondBE: // <= 0 if zero
4312 __ Or(dst, lhs_high, lhs_low);
4313 __ Sltiu(dst, dst, 1);
4314 break;
4315 case kCondNE:
4316 case kCondA: // > 0 if non-zero
4317 __ Or(dst, lhs_high, lhs_low);
4318 __ Sltu(dst, ZERO, dst);
4319 break;
4320 case kCondLT:
4321 __ Slt(dst, lhs_high, ZERO);
4322 break;
4323 case kCondGE:
4324 __ Slt(dst, lhs_high, ZERO);
4325 __ Xori(dst, dst, 1);
4326 break;
4327 case kCondLE:
4328 __ Or(TMP, lhs_high, lhs_low);
4329 __ Sra(AT, lhs_high, 31);
4330 __ Sltu(dst, AT, TMP);
4331 __ Xori(dst, dst, 1);
4332 break;
4333 case kCondGT:
4334 __ Or(TMP, lhs_high, lhs_low);
4335 __ Sra(AT, lhs_high, 31);
4336 __ Sltu(dst, AT, TMP);
4337 break;
4338 case kCondB: // always false
4339 __ Andi(dst, dst, 0);
4340 break;
4341 case kCondAE: // always true
4342 __ Ori(dst, ZERO, 1);
4343 break;
4344 }
4345 } else if (use_imm) {
4346 // TODO: more efficient comparison with constants without loading them into TMP/AT.
4347 switch (cond) {
4348 case kCondEQ:
4349 __ LoadConst32(TMP, imm_high);
4350 __ Xor(TMP, TMP, lhs_high);
4351 __ LoadConst32(AT, imm_low);
4352 __ Xor(AT, AT, lhs_low);
4353 __ Or(dst, TMP, AT);
4354 __ Sltiu(dst, dst, 1);
4355 break;
4356 case kCondNE:
4357 __ LoadConst32(TMP, imm_high);
4358 __ Xor(TMP, TMP, lhs_high);
4359 __ LoadConst32(AT, imm_low);
4360 __ Xor(AT, AT, lhs_low);
4361 __ Or(dst, TMP, AT);
4362 __ Sltu(dst, ZERO, dst);
4363 break;
4364 case kCondLT:
4365 case kCondGE:
4366 if (dst == lhs_low) {
4367 __ LoadConst32(TMP, imm_low);
4368 __ Sltu(dst, lhs_low, TMP);
4369 }
4370 __ LoadConst32(TMP, imm_high);
4371 __ Slt(AT, lhs_high, TMP);
4372 __ Slt(TMP, TMP, lhs_high);
4373 if (dst != lhs_low) {
4374 __ LoadConst32(dst, imm_low);
4375 __ Sltu(dst, lhs_low, dst);
4376 }
4377 __ Slt(dst, TMP, dst);
4378 __ Or(dst, dst, AT);
4379 if (cond == kCondGE) {
4380 __ Xori(dst, dst, 1);
4381 }
4382 break;
4383 case kCondGT:
4384 case kCondLE:
4385 if (dst == lhs_low) {
4386 __ LoadConst32(TMP, imm_low);
4387 __ Sltu(dst, TMP, lhs_low);
4388 }
4389 __ LoadConst32(TMP, imm_high);
4390 __ Slt(AT, TMP, lhs_high);
4391 __ Slt(TMP, lhs_high, TMP);
4392 if (dst != lhs_low) {
4393 __ LoadConst32(dst, imm_low);
4394 __ Sltu(dst, dst, lhs_low);
4395 }
4396 __ Slt(dst, TMP, dst);
4397 __ Or(dst, dst, AT);
4398 if (cond == kCondLE) {
4399 __ Xori(dst, dst, 1);
4400 }
4401 break;
4402 case kCondB:
4403 case kCondAE:
4404 if (dst == lhs_low) {
4405 __ LoadConst32(TMP, imm_low);
4406 __ Sltu(dst, lhs_low, TMP);
4407 }
4408 __ LoadConst32(TMP, imm_high);
4409 __ Sltu(AT, lhs_high, TMP);
4410 __ Sltu(TMP, TMP, lhs_high);
4411 if (dst != lhs_low) {
4412 __ LoadConst32(dst, imm_low);
4413 __ Sltu(dst, lhs_low, dst);
4414 }
4415 __ Slt(dst, TMP, dst);
4416 __ Or(dst, dst, AT);
4417 if (cond == kCondAE) {
4418 __ Xori(dst, dst, 1);
4419 }
4420 break;
4421 case kCondA:
4422 case kCondBE:
4423 if (dst == lhs_low) {
4424 __ LoadConst32(TMP, imm_low);
4425 __ Sltu(dst, TMP, lhs_low);
4426 }
4427 __ LoadConst32(TMP, imm_high);
4428 __ Sltu(AT, TMP, lhs_high);
4429 __ Sltu(TMP, lhs_high, TMP);
4430 if (dst != lhs_low) {
4431 __ LoadConst32(dst, imm_low);
4432 __ Sltu(dst, dst, lhs_low);
4433 }
4434 __ Slt(dst, TMP, dst);
4435 __ Or(dst, dst, AT);
4436 if (cond == kCondBE) {
4437 __ Xori(dst, dst, 1);
4438 }
4439 break;
4440 }
4441 } else {
4442 switch (cond) {
4443 case kCondEQ:
4444 __ Xor(TMP, lhs_high, rhs_high);
4445 __ Xor(AT, lhs_low, rhs_low);
4446 __ Or(dst, TMP, AT);
4447 __ Sltiu(dst, dst, 1);
4448 break;
4449 case kCondNE:
4450 __ Xor(TMP, lhs_high, rhs_high);
4451 __ Xor(AT, lhs_low, rhs_low);
4452 __ Or(dst, TMP, AT);
4453 __ Sltu(dst, ZERO, dst);
4454 break;
4455 case kCondLT:
4456 case kCondGE:
4457 __ Slt(TMP, rhs_high, lhs_high);
4458 __ Sltu(AT, lhs_low, rhs_low);
4459 __ Slt(TMP, TMP, AT);
4460 __ Slt(AT, lhs_high, rhs_high);
4461 __ Or(dst, AT, TMP);
4462 if (cond == kCondGE) {
4463 __ Xori(dst, dst, 1);
4464 }
4465 break;
4466 case kCondGT:
4467 case kCondLE:
4468 __ Slt(TMP, lhs_high, rhs_high);
4469 __ Sltu(AT, rhs_low, lhs_low);
4470 __ Slt(TMP, TMP, AT);
4471 __ Slt(AT, rhs_high, lhs_high);
4472 __ Or(dst, AT, TMP);
4473 if (cond == kCondLE) {
4474 __ Xori(dst, dst, 1);
4475 }
4476 break;
4477 case kCondB:
4478 case kCondAE:
4479 __ Sltu(TMP, rhs_high, lhs_high);
4480 __ Sltu(AT, lhs_low, rhs_low);
4481 __ Slt(TMP, TMP, AT);
4482 __ Sltu(AT, lhs_high, rhs_high);
4483 __ Or(dst, AT, TMP);
4484 if (cond == kCondAE) {
4485 __ Xori(dst, dst, 1);
4486 }
4487 break;
4488 case kCondA:
4489 case kCondBE:
4490 __ Sltu(TMP, lhs_high, rhs_high);
4491 __ Sltu(AT, rhs_low, lhs_low);
4492 __ Slt(TMP, TMP, AT);
4493 __ Sltu(AT, rhs_high, lhs_high);
4494 __ Or(dst, AT, TMP);
4495 if (cond == kCondBE) {
4496 __ Xori(dst, dst, 1);
4497 }
4498 break;
4499 }
4500 }
4501}
4502
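// Branching version of the 64-bit comparison above: for the ordering
// conditions it branches as soon as the high words alone decide the result
// (e.g. Blt on the high words for kCondLT) and only consults the unsigned
// low-word comparison when the high words turn out to be equal.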
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08004503void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
4504 LocationSummary* locations,
4505 MipsLabel* label) {
4506 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
4507 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
4508 Location rhs_location = locations->InAt(1);
4509 Register rhs_high = ZERO;
4510 Register rhs_low = ZERO;
4511 int64_t imm = 0;
4512 uint32_t imm_high = 0;
4513 uint32_t imm_low = 0;
4514 bool use_imm = rhs_location.IsConstant();
4515 if (use_imm) {
4516 imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
4517 imm_high = High32Bits(imm);
4518 imm_low = Low32Bits(imm);
4519 } else {
4520 rhs_high = rhs_location.AsRegisterPairHigh<Register>();
4521 rhs_low = rhs_location.AsRegisterPairLow<Register>();
4522 }
4523
4524 if (use_imm && imm == 0) {
4525 switch (cond) {
4526 case kCondEQ:
4527 case kCondBE: // <= 0 if zero
4528 __ Or(TMP, lhs_high, lhs_low);
4529 __ Beqz(TMP, label);
4530 break;
4531 case kCondNE:
4532 case kCondA: // > 0 if non-zero
4533 __ Or(TMP, lhs_high, lhs_low);
4534 __ Bnez(TMP, label);
4535 break;
4536 case kCondLT:
4537 __ Bltz(lhs_high, label);
4538 break;
4539 case kCondGE:
4540 __ Bgez(lhs_high, label);
4541 break;
4542 case kCondLE:
4543 __ Or(TMP, lhs_high, lhs_low);
4544 __ Sra(AT, lhs_high, 31);
4545 __ Bgeu(AT, TMP, label);
4546 break;
4547 case kCondGT:
4548 __ Or(TMP, lhs_high, lhs_low);
4549 __ Sra(AT, lhs_high, 31);
4550 __ Bltu(AT, TMP, label);
4551 break;
4552 case kCondB: // always false
4553 break;
4554 case kCondAE: // always true
4555 __ B(label);
4556 break;
4557 }
4558 } else if (use_imm) {
4559 // TODO: more efficient comparison with constants without loading them into TMP/AT.
4560 switch (cond) {
4561 case kCondEQ:
4562 __ LoadConst32(TMP, imm_high);
4563 __ Xor(TMP, TMP, lhs_high);
4564 __ LoadConst32(AT, imm_low);
4565 __ Xor(AT, AT, lhs_low);
4566 __ Or(TMP, TMP, AT);
4567 __ Beqz(TMP, label);
4568 break;
4569 case kCondNE:
4570 __ LoadConst32(TMP, imm_high);
4571 __ Xor(TMP, TMP, lhs_high);
4572 __ LoadConst32(AT, imm_low);
4573 __ Xor(AT, AT, lhs_low);
4574 __ Or(TMP, TMP, AT);
4575 __ Bnez(TMP, label);
4576 break;
4577 case kCondLT:
4578 __ LoadConst32(TMP, imm_high);
4579 __ Blt(lhs_high, TMP, label);
4580 __ Slt(TMP, TMP, lhs_high);
4581 __ LoadConst32(AT, imm_low);
4582 __ Sltu(AT, lhs_low, AT);
4583 __ Blt(TMP, AT, label);
4584 break;
4585 case kCondGE:
4586 __ LoadConst32(TMP, imm_high);
4587 __ Blt(TMP, lhs_high, label);
4588 __ Slt(TMP, lhs_high, TMP);
4589 __ LoadConst32(AT, imm_low);
4590 __ Sltu(AT, lhs_low, AT);
4591 __ Or(TMP, TMP, AT);
4592 __ Beqz(TMP, label);
4593 break;
4594 case kCondLE:
4595 __ LoadConst32(TMP, imm_high);
4596 __ Blt(lhs_high, TMP, label);
4597 __ Slt(TMP, TMP, lhs_high);
4598 __ LoadConst32(AT, imm_low);
4599 __ Sltu(AT, AT, lhs_low);
4600 __ Or(TMP, TMP, AT);
4601 __ Beqz(TMP, label);
4602 break;
4603 case kCondGT:
4604 __ LoadConst32(TMP, imm_high);
4605 __ Blt(TMP, lhs_high, label);
4606 __ Slt(TMP, lhs_high, TMP);
4607 __ LoadConst32(AT, imm_low);
4608 __ Sltu(AT, AT, lhs_low);
4609 __ Blt(TMP, AT, label);
4610 break;
4611 case kCondB:
4612 __ LoadConst32(TMP, imm_high);
4613 __ Bltu(lhs_high, TMP, label);
4614 __ Sltu(TMP, TMP, lhs_high);
4615 __ LoadConst32(AT, imm_low);
4616 __ Sltu(AT, lhs_low, AT);
4617 __ Blt(TMP, AT, label);
4618 break;
4619 case kCondAE:
4620 __ LoadConst32(TMP, imm_high);
4621 __ Bltu(TMP, lhs_high, label);
4622 __ Sltu(TMP, lhs_high, TMP);
4623 __ LoadConst32(AT, imm_low);
4624 __ Sltu(AT, lhs_low, AT);
4625 __ Or(TMP, TMP, AT);
4626 __ Beqz(TMP, label);
4627 break;
4628 case kCondBE:
4629 __ LoadConst32(TMP, imm_high);
4630 __ Bltu(lhs_high, TMP, label);
4631 __ Sltu(TMP, TMP, lhs_high);
4632 __ LoadConst32(AT, imm_low);
4633 __ Sltu(AT, AT, lhs_low);
4634 __ Or(TMP, TMP, AT);
4635 __ Beqz(TMP, label);
4636 break;
4637 case kCondA:
4638 __ LoadConst32(TMP, imm_high);
4639 __ Bltu(TMP, lhs_high, label);
4640 __ Sltu(TMP, lhs_high, TMP);
4641 __ LoadConst32(AT, imm_low);
4642 __ Sltu(AT, AT, lhs_low);
4643 __ Blt(TMP, AT, label);
4644 break;
4645 }
4646 } else {
4647 switch (cond) {
4648 case kCondEQ:
4649 __ Xor(TMP, lhs_high, rhs_high);
4650 __ Xor(AT, lhs_low, rhs_low);
4651 __ Or(TMP, TMP, AT);
4652 __ Beqz(TMP, label);
4653 break;
4654 case kCondNE:
4655 __ Xor(TMP, lhs_high, rhs_high);
4656 __ Xor(AT, lhs_low, rhs_low);
4657 __ Or(TMP, TMP, AT);
4658 __ Bnez(TMP, label);
4659 break;
4660 case kCondLT:
4661 __ Blt(lhs_high, rhs_high, label);
4662 __ Slt(TMP, rhs_high, lhs_high);
4663 __ Sltu(AT, lhs_low, rhs_low);
4664 __ Blt(TMP, AT, label);
4665 break;
4666 case kCondGE:
4667 __ Blt(rhs_high, lhs_high, label);
4668 __ Slt(TMP, lhs_high, rhs_high);
4669 __ Sltu(AT, lhs_low, rhs_low);
4670 __ Or(TMP, TMP, AT);
4671 __ Beqz(TMP, label);
4672 break;
4673 case kCondLE:
4674 __ Blt(lhs_high, rhs_high, label);
4675 __ Slt(TMP, rhs_high, lhs_high);
4676 __ Sltu(AT, rhs_low, lhs_low);
4677 __ Or(TMP, TMP, AT);
4678 __ Beqz(TMP, label);
4679 break;
4680 case kCondGT:
4681 __ Blt(rhs_high, lhs_high, label);
4682 __ Slt(TMP, lhs_high, rhs_high);
4683 __ Sltu(AT, rhs_low, lhs_low);
4684 __ Blt(TMP, AT, label);
4685 break;
4686 case kCondB:
4687 __ Bltu(lhs_high, rhs_high, label);
4688 __ Sltu(TMP, rhs_high, lhs_high);
4689 __ Sltu(AT, lhs_low, rhs_low);
4690 __ Blt(TMP, AT, label);
4691 break;
4692 case kCondAE:
4693 __ Bltu(rhs_high, lhs_high, label);
4694 __ Sltu(TMP, lhs_high, rhs_high);
4695 __ Sltu(AT, lhs_low, rhs_low);
4696 __ Or(TMP, TMP, AT);
4697 __ Beqz(TMP, label);
4698 break;
4699 case kCondBE:
4700 __ Bltu(lhs_high, rhs_high, label);
4701 __ Sltu(TMP, rhs_high, lhs_high);
4702 __ Sltu(AT, rhs_low, lhs_low);
4703 __ Or(TMP, TMP, AT);
4704 __ Beqz(TMP, label);
4705 break;
4706 case kCondA:
4707 __ Bltu(rhs_high, lhs_high, label);
4708 __ Sltu(TMP, lhs_high, rhs_high);
4709 __ Sltu(AT, rhs_low, lhs_low);
4710 __ Blt(TMP, AT, label);
4711 break;
4712 }
4713 }
4714}
4715
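// Materializes a floating-point comparison as 0/1 in a GPR. gt_bias selects
// the behaviour of an unordered comparison (a NaN operand): with gt_bias
// NaN acts as if lhs were greater than rhs (kCondLT/kCondLE fail,
// kCondGT/kCondGE succeed), without it NaN acts as if lhs were smaller.
// That is what the choice between the ordered (CmpLtS/ColtS, ...) and
// unordered (CmpUltS/CultS, ...) compares below implements. On R6 the
// compare result fills FTMP and is moved to the GPR with Mfc1 (Andi 1 for
// the direct forms, Addiu +1 to invert kCondNE); on R2 condition code 0 is
// set and the 0/1 value is produced with LoadConst32 + Movf/Movt.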
Alexey Frunze2ddb7172016-09-06 17:04:55 -07004716void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
4717 bool gt_bias,
4718 Primitive::Type type,
4719 LocationSummary* locations) {
4720 Register dst = locations->Out().AsRegister<Register>();
4721 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
4722 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
4723 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
4724 if (type == Primitive::kPrimFloat) {
4725 if (isR6) {
4726 switch (cond) {
4727 case kCondEQ:
4728 __ CmpEqS(FTMP, lhs, rhs);
4729 __ Mfc1(dst, FTMP);
4730 __ Andi(dst, dst, 1);
4731 break;
4732 case kCondNE:
4733 __ CmpEqS(FTMP, lhs, rhs);
4734 __ Mfc1(dst, FTMP);
4735 __ Addiu(dst, dst, 1);
4736 break;
4737 case kCondLT:
4738 if (gt_bias) {
4739 __ CmpLtS(FTMP, lhs, rhs);
4740 } else {
4741 __ CmpUltS(FTMP, lhs, rhs);
4742 }
4743 __ Mfc1(dst, FTMP);
4744 __ Andi(dst, dst, 1);
4745 break;
4746 case kCondLE:
4747 if (gt_bias) {
4748 __ CmpLeS(FTMP, lhs, rhs);
4749 } else {
4750 __ CmpUleS(FTMP, lhs, rhs);
4751 }
4752 __ Mfc1(dst, FTMP);
4753 __ Andi(dst, dst, 1);
4754 break;
4755 case kCondGT:
4756 if (gt_bias) {
4757 __ CmpUltS(FTMP, rhs, lhs);
4758 } else {
4759 __ CmpLtS(FTMP, rhs, lhs);
4760 }
4761 __ Mfc1(dst, FTMP);
4762 __ Andi(dst, dst, 1);
4763 break;
4764 case kCondGE:
4765 if (gt_bias) {
4766 __ CmpUleS(FTMP, rhs, lhs);
4767 } else {
4768 __ CmpLeS(FTMP, rhs, lhs);
4769 }
4770 __ Mfc1(dst, FTMP);
4771 __ Andi(dst, dst, 1);
4772 break;
4773 default:
4774 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4775 UNREACHABLE();
4776 }
4777 } else {
4778 switch (cond) {
4779 case kCondEQ:
4780 __ CeqS(0, lhs, rhs);
4781 __ LoadConst32(dst, 1);
4782 __ Movf(dst, ZERO, 0);
4783 break;
4784 case kCondNE:
4785 __ CeqS(0, lhs, rhs);
4786 __ LoadConst32(dst, 1);
4787 __ Movt(dst, ZERO, 0);
4788 break;
4789 case kCondLT:
4790 if (gt_bias) {
4791 __ ColtS(0, lhs, rhs);
4792 } else {
4793 __ CultS(0, lhs, rhs);
4794 }
4795 __ LoadConst32(dst, 1);
4796 __ Movf(dst, ZERO, 0);
4797 break;
4798 case kCondLE:
4799 if (gt_bias) {
4800 __ ColeS(0, lhs, rhs);
4801 } else {
4802 __ CuleS(0, lhs, rhs);
4803 }
4804 __ LoadConst32(dst, 1);
4805 __ Movf(dst, ZERO, 0);
4806 break;
4807 case kCondGT:
4808 if (gt_bias) {
4809 __ CultS(0, rhs, lhs);
4810 } else {
4811 __ ColtS(0, rhs, lhs);
4812 }
4813 __ LoadConst32(dst, 1);
4814 __ Movf(dst, ZERO, 0);
4815 break;
4816 case kCondGE:
4817 if (gt_bias) {
4818 __ CuleS(0, rhs, lhs);
4819 } else {
4820 __ ColeS(0, rhs, lhs);
4821 }
4822 __ LoadConst32(dst, 1);
4823 __ Movf(dst, ZERO, 0);
4824 break;
4825 default:
4826 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4827 UNREACHABLE();
4828 }
4829 }
4830 } else {
4831 DCHECK_EQ(type, Primitive::kPrimDouble);
4832 if (isR6) {
4833 switch (cond) {
4834 case kCondEQ:
4835 __ CmpEqD(FTMP, lhs, rhs);
4836 __ Mfc1(dst, FTMP);
4837 __ Andi(dst, dst, 1);
4838 break;
4839 case kCondNE:
4840 __ CmpEqD(FTMP, lhs, rhs);
4841 __ Mfc1(dst, FTMP);
4842 __ Addiu(dst, dst, 1);
4843 break;
4844 case kCondLT:
4845 if (gt_bias) {
4846 __ CmpLtD(FTMP, lhs, rhs);
4847 } else {
4848 __ CmpUltD(FTMP, lhs, rhs);
4849 }
4850 __ Mfc1(dst, FTMP);
4851 __ Andi(dst, dst, 1);
4852 break;
4853 case kCondLE:
4854 if (gt_bias) {
4855 __ CmpLeD(FTMP, lhs, rhs);
4856 } else {
4857 __ CmpUleD(FTMP, lhs, rhs);
4858 }
4859 __ Mfc1(dst, FTMP);
4860 __ Andi(dst, dst, 1);
4861 break;
4862 case kCondGT:
4863 if (gt_bias) {
4864 __ CmpUltD(FTMP, rhs, lhs);
4865 } else {
4866 __ CmpLtD(FTMP, rhs, lhs);
4867 }
4868 __ Mfc1(dst, FTMP);
4869 __ Andi(dst, dst, 1);
4870 break;
4871 case kCondGE:
4872 if (gt_bias) {
4873 __ CmpUleD(FTMP, rhs, lhs);
4874 } else {
4875 __ CmpLeD(FTMP, rhs, lhs);
4876 }
4877 __ Mfc1(dst, FTMP);
4878 __ Andi(dst, dst, 1);
4879 break;
4880 default:
4881 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4882 UNREACHABLE();
4883 }
4884 } else {
4885 switch (cond) {
4886 case kCondEQ:
4887 __ CeqD(0, lhs, rhs);
4888 __ LoadConst32(dst, 1);
4889 __ Movf(dst, ZERO, 0);
4890 break;
4891 case kCondNE:
4892 __ CeqD(0, lhs, rhs);
4893 __ LoadConst32(dst, 1);
4894 __ Movt(dst, ZERO, 0);
4895 break;
4896 case kCondLT:
4897 if (gt_bias) {
4898 __ ColtD(0, lhs, rhs);
4899 } else {
4900 __ CultD(0, lhs, rhs);
4901 }
4902 __ LoadConst32(dst, 1);
4903 __ Movf(dst, ZERO, 0);
4904 break;
4905 case kCondLE:
4906 if (gt_bias) {
4907 __ ColeD(0, lhs, rhs);
4908 } else {
4909 __ CuleD(0, lhs, rhs);
4910 }
4911 __ LoadConst32(dst, 1);
4912 __ Movf(dst, ZERO, 0);
4913 break;
4914 case kCondGT:
4915 if (gt_bias) {
4916 __ CultD(0, rhs, lhs);
4917 } else {
4918 __ ColtD(0, rhs, lhs);
4919 }
4920 __ LoadConst32(dst, 1);
4921 __ Movf(dst, ZERO, 0);
4922 break;
4923 case kCondGE:
4924 if (gt_bias) {
4925 __ CuleD(0, rhs, lhs);
4926 } else {
4927 __ ColeD(0, rhs, lhs);
4928 }
4929 __ LoadConst32(dst, 1);
4930 __ Movf(dst, ZERO, 0);
4931 break;
4932 default:
4933 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4934 UNREACHABLE();
4935 }
4936 }
4937 }
4938}
4939
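// MaterializeFpCompareR2 and MaterializeFpCompareR6 below only perform the
// floating-point compare, leaving the outcome in condition code `cc` (R2)
// or in the FPU register `dst` (R6) rather than in a GPR, and return
// whether that flag is inverted: a true return (the kCondNE cases) means
// the condition holds when the flag is clear.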
Alexey Frunze674b9ee2016-09-20 14:54:15 -07004940bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
4941 bool gt_bias,
4942 Primitive::Type type,
4943 LocationSummary* input_locations,
4944 int cc) {
4945 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
4946 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
4947 CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
4948 if (type == Primitive::kPrimFloat) {
4949 switch (cond) {
4950 case kCondEQ:
4951 __ CeqS(cc, lhs, rhs);
4952 return false;
4953 case kCondNE:
4954 __ CeqS(cc, lhs, rhs);
4955 return true;
4956 case kCondLT:
4957 if (gt_bias) {
4958 __ ColtS(cc, lhs, rhs);
4959 } else {
4960 __ CultS(cc, lhs, rhs);
4961 }
4962 return false;
4963 case kCondLE:
4964 if (gt_bias) {
4965 __ ColeS(cc, lhs, rhs);
4966 } else {
4967 __ CuleS(cc, lhs, rhs);
4968 }
4969 return false;
4970 case kCondGT:
4971 if (gt_bias) {
4972 __ CultS(cc, rhs, lhs);
4973 } else {
4974 __ ColtS(cc, rhs, lhs);
4975 }
4976 return false;
4977 case kCondGE:
4978 if (gt_bias) {
4979 __ CuleS(cc, rhs, lhs);
4980 } else {
4981 __ ColeS(cc, rhs, lhs);
4982 }
4983 return false;
4984 default:
4985 LOG(FATAL) << "Unexpected non-floating-point condition";
4986 UNREACHABLE();
4987 }
4988 } else {
4989 DCHECK_EQ(type, Primitive::kPrimDouble);
4990 switch (cond) {
4991 case kCondEQ:
4992 __ CeqD(cc, lhs, rhs);
4993 return false;
4994 case kCondNE:
4995 __ CeqD(cc, lhs, rhs);
4996 return true;
4997 case kCondLT:
4998 if (gt_bias) {
4999 __ ColtD(cc, lhs, rhs);
5000 } else {
5001 __ CultD(cc, lhs, rhs);
5002 }
5003 return false;
5004 case kCondLE:
5005 if (gt_bias) {
5006 __ ColeD(cc, lhs, rhs);
5007 } else {
5008 __ CuleD(cc, lhs, rhs);
5009 }
5010 return false;
5011 case kCondGT:
5012 if (gt_bias) {
5013 __ CultD(cc, rhs, lhs);
5014 } else {
5015 __ ColtD(cc, rhs, lhs);
5016 }
5017 return false;
5018 case kCondGE:
5019 if (gt_bias) {
5020 __ CuleD(cc, rhs, lhs);
5021 } else {
5022 __ ColeD(cc, rhs, lhs);
5023 }
5024 return false;
5025 default:
5026 LOG(FATAL) << "Unexpected non-floating-point condition";
5027 UNREACHABLE();
5028 }
5029 }
5030}
5031
5032bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
5033 bool gt_bias,
5034 Primitive::Type type,
5035 LocationSummary* input_locations,
5036 FRegister dst) {
5037 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
5038 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
5039 CHECK(codegen_->GetInstructionSetFeatures().IsR6());
5040 if (type == Primitive::kPrimFloat) {
5041 switch (cond) {
5042 case kCondEQ:
5043 __ CmpEqS(dst, lhs, rhs);
5044 return false;
5045 case kCondNE:
5046 __ CmpEqS(dst, lhs, rhs);
5047 return true;
5048 case kCondLT:
5049 if (gt_bias) {
5050 __ CmpLtS(dst, lhs, rhs);
5051 } else {
5052 __ CmpUltS(dst, lhs, rhs);
5053 }
5054 return false;
5055 case kCondLE:
5056 if (gt_bias) {
5057 __ CmpLeS(dst, lhs, rhs);
5058 } else {
5059 __ CmpUleS(dst, lhs, rhs);
5060 }
5061 return false;
5062 case kCondGT:
5063 if (gt_bias) {
5064 __ CmpUltS(dst, rhs, lhs);
5065 } else {
5066 __ CmpLtS(dst, rhs, lhs);
5067 }
5068 return false;
5069 case kCondGE:
5070 if (gt_bias) {
5071 __ CmpUleS(dst, rhs, lhs);
5072 } else {
5073 __ CmpLeS(dst, rhs, lhs);
5074 }
5075 return false;
5076 default:
5077 LOG(FATAL) << "Unexpected non-floating-point condition";
5078 UNREACHABLE();
5079 }
5080 } else {
5081 DCHECK_EQ(type, Primitive::kPrimDouble);
5082 switch (cond) {
5083 case kCondEQ:
5084 __ CmpEqD(dst, lhs, rhs);
5085 return false;
5086 case kCondNE:
5087 __ CmpEqD(dst, lhs, rhs);
5088 return true;
5089 case kCondLT:
5090 if (gt_bias) {
5091 __ CmpLtD(dst, lhs, rhs);
5092 } else {
5093 __ CmpUltD(dst, lhs, rhs);
5094 }
5095 return false;
5096 case kCondLE:
5097 if (gt_bias) {
5098 __ CmpLeD(dst, lhs, rhs);
5099 } else {
5100 __ CmpUleD(dst, lhs, rhs);
5101 }
5102 return false;
5103 case kCondGT:
5104 if (gt_bias) {
5105 __ CmpUltD(dst, rhs, lhs);
5106 } else {
5107 __ CmpLtD(dst, rhs, lhs);
5108 }
5109 return false;
5110 case kCondGE:
5111 if (gt_bias) {
5112 __ CmpUleD(dst, rhs, lhs);
5113 } else {
5114 __ CmpLeD(dst, rhs, lhs);
5115 }
5116 return false;
5117 default:
5118 LOG(FATAL) << "Unexpected non-floating-point condition";
5119 UNREACHABLE();
5120 }
5121 }
5122}
5123
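// Branching form of GenerateFpCompare: the same gt_bias-driven choice
// between ordered and unordered compares, followed by Bc1nez/Bc1eqz on FTMP
// (R6) or Bc1t/Bc1f on condition code 0 (R2) instead of materializing a
// value in a GPR.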
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005124void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
5125 bool gt_bias,
5126 Primitive::Type type,
5127 LocationSummary* locations,
5128 MipsLabel* label) {
5129 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
5130 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
5131 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
5132 if (type == Primitive::kPrimFloat) {
5133 if (isR6) {
5134 switch (cond) {
5135 case kCondEQ:
5136 __ CmpEqS(FTMP, lhs, rhs);
5137 __ Bc1nez(FTMP, label);
5138 break;
5139 case kCondNE:
5140 __ CmpEqS(FTMP, lhs, rhs);
5141 __ Bc1eqz(FTMP, label);
5142 break;
5143 case kCondLT:
5144 if (gt_bias) {
5145 __ CmpLtS(FTMP, lhs, rhs);
5146 } else {
5147 __ CmpUltS(FTMP, lhs, rhs);
5148 }
5149 __ Bc1nez(FTMP, label);
5150 break;
5151 case kCondLE:
5152 if (gt_bias) {
5153 __ CmpLeS(FTMP, lhs, rhs);
5154 } else {
5155 __ CmpUleS(FTMP, lhs, rhs);
5156 }
5157 __ Bc1nez(FTMP, label);
5158 break;
5159 case kCondGT:
5160 if (gt_bias) {
5161 __ CmpUltS(FTMP, rhs, lhs);
5162 } else {
5163 __ CmpLtS(FTMP, rhs, lhs);
5164 }
5165 __ Bc1nez(FTMP, label);
5166 break;
5167 case kCondGE:
5168 if (gt_bias) {
5169 __ CmpUleS(FTMP, rhs, lhs);
5170 } else {
5171 __ CmpLeS(FTMP, rhs, lhs);
5172 }
5173 __ Bc1nez(FTMP, label);
5174 break;
5175 default:
5176 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005177 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005178 }
5179 } else {
5180 switch (cond) {
5181 case kCondEQ:
5182 __ CeqS(0, lhs, rhs);
5183 __ Bc1t(0, label);
5184 break;
5185 case kCondNE:
5186 __ CeqS(0, lhs, rhs);
5187 __ Bc1f(0, label);
5188 break;
5189 case kCondLT:
5190 if (gt_bias) {
5191 __ ColtS(0, lhs, rhs);
5192 } else {
5193 __ CultS(0, lhs, rhs);
5194 }
5195 __ Bc1t(0, label);
5196 break;
5197 case kCondLE:
5198 if (gt_bias) {
5199 __ ColeS(0, lhs, rhs);
5200 } else {
5201 __ CuleS(0, lhs, rhs);
5202 }
5203 __ Bc1t(0, label);
5204 break;
5205 case kCondGT:
5206 if (gt_bias) {
5207 __ CultS(0, rhs, lhs);
5208 } else {
5209 __ ColtS(0, rhs, lhs);
5210 }
5211 __ Bc1t(0, label);
5212 break;
5213 case kCondGE:
5214 if (gt_bias) {
5215 __ CuleS(0, rhs, lhs);
5216 } else {
5217 __ ColeS(0, rhs, lhs);
5218 }
5219 __ Bc1t(0, label);
5220 break;
5221 default:
5222 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005223 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005224 }
5225 }
5226 } else {
5227 DCHECK_EQ(type, Primitive::kPrimDouble);
5228 if (isR6) {
5229 switch (cond) {
5230 case kCondEQ:
5231 __ CmpEqD(FTMP, lhs, rhs);
5232 __ Bc1nez(FTMP, label);
5233 break;
5234 case kCondNE:
5235 __ CmpEqD(FTMP, lhs, rhs);
5236 __ Bc1eqz(FTMP, label);
5237 break;
5238 case kCondLT:
5239 if (gt_bias) {
5240 __ CmpLtD(FTMP, lhs, rhs);
5241 } else {
5242 __ CmpUltD(FTMP, lhs, rhs);
5243 }
5244 __ Bc1nez(FTMP, label);
5245 break;
5246 case kCondLE:
5247 if (gt_bias) {
5248 __ CmpLeD(FTMP, lhs, rhs);
5249 } else {
5250 __ CmpUleD(FTMP, lhs, rhs);
5251 }
5252 __ Bc1nez(FTMP, label);
5253 break;
5254 case kCondGT:
5255 if (gt_bias) {
5256 __ CmpUltD(FTMP, rhs, lhs);
5257 } else {
5258 __ CmpLtD(FTMP, rhs, lhs);
5259 }
5260 __ Bc1nez(FTMP, label);
5261 break;
5262 case kCondGE:
5263 if (gt_bias) {
5264 __ CmpUleD(FTMP, rhs, lhs);
5265 } else {
5266 __ CmpLeD(FTMP, rhs, lhs);
5267 }
5268 __ Bc1nez(FTMP, label);
5269 break;
5270 default:
5271 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005272 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005273 }
5274 } else {
5275 switch (cond) {
5276 case kCondEQ:
5277 __ CeqD(0, lhs, rhs);
5278 __ Bc1t(0, label);
5279 break;
5280 case kCondNE:
5281 __ CeqD(0, lhs, rhs);
5282 __ Bc1f(0, label);
5283 break;
5284 case kCondLT:
5285 if (gt_bias) {
5286 __ ColtD(0, lhs, rhs);
5287 } else {
5288 __ CultD(0, lhs, rhs);
5289 }
5290 __ Bc1t(0, label);
5291 break;
5292 case kCondLE:
5293 if (gt_bias) {
5294 __ ColeD(0, lhs, rhs);
5295 } else {
5296 __ CuleD(0, lhs, rhs);
5297 }
5298 __ Bc1t(0, label);
5299 break;
5300 case kCondGT:
5301 if (gt_bias) {
5302 __ CultD(0, rhs, lhs);
5303 } else {
5304 __ ColtD(0, rhs, lhs);
5305 }
5306 __ Bc1t(0, label);
5307 break;
5308 case kCondGE:
5309 if (gt_bias) {
5310 __ CuleD(0, rhs, lhs);
5311 } else {
5312 __ ColeD(0, rhs, lhs);
5313 }
5314 __ Bc1t(0, label);
5315 break;
5316 default:
5317 LOG(FATAL) << "Unexpected non-floating-point condition";
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005318 UNREACHABLE();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005319 }
5320 }
5321 }
5322}
5323
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005324void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00005325 size_t condition_input_index,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005326 MipsLabel* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00005327 MipsLabel* false_target) {
5328 HInstruction* cond = instruction->InputAt(condition_input_index);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005329
David Brazdil0debae72015-11-12 18:37:00 +00005330 if (true_target == nullptr && false_target == nullptr) {
5331 // Nothing to do. The code always falls through.
5332 return;
5333 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00005334 // Constant condition, statically compared against "true" (integer value 1).
5335 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00005336 if (true_target != nullptr) {
5337 __ B(true_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005338 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005339 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00005340 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00005341 if (false_target != nullptr) {
5342 __ B(false_target);
5343 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005344 }
David Brazdil0debae72015-11-12 18:37:00 +00005345 return;
5346 }
5347
5348 // The following code generates these patterns:
5349 // (1) true_target == nullptr && false_target != nullptr
5350 // - opposite condition true => branch to false_target
5351 // (2) true_target != nullptr && false_target == nullptr
5352 // - condition true => branch to true_target
5353 // (3) true_target != nullptr && false_target != nullptr
5354 // - condition true => branch to true_target
5355 // - branch to false_target
5356 if (IsBooleanValueOrMaterializedCondition(cond)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005357 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00005358 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005359 DCHECK(cond_val.IsRegister());
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005360 if (true_target == nullptr) {
David Brazdil0debae72015-11-12 18:37:00 +00005361 __ Beqz(cond_val.AsRegister<Register>(), false_target);
5362 } else {
5363 __ Bnez(cond_val.AsRegister<Register>(), true_target);
5364 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005365 } else {
5366 // The condition instruction has not been materialized, use its inputs as
5367 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00005368 HCondition* condition = cond->AsCondition();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005369 Primitive::Type type = condition->InputAt(0)->GetType();
5370 LocationSummary* locations = cond->GetLocations();
5371 IfCondition if_cond = condition->GetCondition();
5372 MipsLabel* branch_target = true_target;
David Brazdil0debae72015-11-12 18:37:00 +00005373
David Brazdil0debae72015-11-12 18:37:00 +00005374 if (true_target == nullptr) {
5375 if_cond = condition->GetOppositeCondition();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005376 branch_target = false_target;
David Brazdil0debae72015-11-12 18:37:00 +00005377 }
5378
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08005379 switch (type) {
5380 default:
5381 GenerateIntCompareAndBranch(if_cond, locations, branch_target);
5382 break;
5383 case Primitive::kPrimLong:
5384 GenerateLongCompareAndBranch(if_cond, locations, branch_target);
5385 break;
5386 case Primitive::kPrimFloat:
5387 case Primitive::kPrimDouble:
5388 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
5389 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005390 }
5391 }
David Brazdil0debae72015-11-12 18:37:00 +00005392
5393 // If neither branch falls through (case 3), the conditional branch to `true_target`
5394 // was already emitted (case 2) and we need to emit a jump to `false_target`.
5395 if (true_target != nullptr && false_target != nullptr) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005396 __ B(false_target);
5397 }
5398}
5399
5400void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5401 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00005402 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005403 locations->SetInAt(0, Location::RequiresRegister());
5404 }
5405}
5406
5407void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00005408 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5409 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5410 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5411 nullptr : codegen_->GetLabelOf(true_successor);
5412 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5413 nullptr : codegen_->GetLabelOf(false_successor);
5414 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005415}
5416
5417void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
5418 LocationSummary* locations = new (GetGraph()->GetArena())
5419 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01005420 InvokeRuntimeCallingConvention calling_convention;
5421 RegisterSet caller_saves = RegisterSet::Empty();
5422 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5423 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00005424 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005425 locations->SetInAt(0, Location::RequiresRegister());
5426 }
5427}
5428
5429void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005430 SlowPathCodeMIPS* slow_path =
5431 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005432 GenerateTestAndBranch(deoptimize,
5433 /* condition_input_index */ 0,
5434 slow_path->GetEntryLabel(),
5435 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005436}
5437
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005438// This function returns true if a conditional move can be generated for HSelect.
5439// Otherwise it returns false and HSelect must be implemented in terms of conditional
5440// branches and regular moves.
5441//
5442// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
5443//
5444// While determining feasibility of a conditional move and setting inputs/outputs
5445// are two distinct tasks, this function does both because they share quite a bit
5446// of common logic.
5447static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
5448 bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
5449 HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
5450 HCondition* condition = cond->AsCondition();
5451
5452 Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
5453 Primitive::Type dst_type = select->GetType();
5454
5455 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
5456 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
5457 bool is_true_value_zero_constant =
5458 (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
5459 bool is_false_value_zero_constant =
5460 (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());
5461
5462 bool can_move_conditionally = false;
5463 bool use_const_for_false_in = false;
5464 bool use_const_for_true_in = false;
5465
5466 if (!cond->IsConstant()) {
5467 switch (cond_type) {
5468 default:
5469 switch (dst_type) {
5470 default:
5471 // Moving int on int condition.
5472 if (is_r6) {
5473 if (is_true_value_zero_constant) {
5474 // seleqz out_reg, false_reg, cond_reg
5475 can_move_conditionally = true;
5476 use_const_for_true_in = true;
5477 } else if (is_false_value_zero_constant) {
5478 // selnez out_reg, true_reg, cond_reg
5479 can_move_conditionally = true;
5480 use_const_for_false_in = true;
5481 } else if (materialized) {
5482 // Not materializing unmaterialized int conditions
5483 // to keep the instruction count low.
5484 // selnez AT, true_reg, cond_reg
5485 // seleqz TMP, false_reg, cond_reg
5486 // or out_reg, AT, TMP
5487 can_move_conditionally = true;
5488 }
5489 } else {
5490 // movn out_reg, true_reg/ZERO, cond_reg
5491 can_move_conditionally = true;
5492 use_const_for_true_in = is_true_value_zero_constant;
5493 }
5494 break;
5495 case Primitive::kPrimLong:
5496 // Moving long on int condition.
5497 if (is_r6) {
5498 if (is_true_value_zero_constant) {
5499 // seleqz out_reg_lo, false_reg_lo, cond_reg
5500 // seleqz out_reg_hi, false_reg_hi, cond_reg
5501 can_move_conditionally = true;
5502 use_const_for_true_in = true;
5503 } else if (is_false_value_zero_constant) {
5504 // selnez out_reg_lo, true_reg_lo, cond_reg
5505 // selnez out_reg_hi, true_reg_hi, cond_reg
5506 can_move_conditionally = true;
5507 use_const_for_false_in = true;
5508 }
5509 // Other long conditional moves would generate 6+ instructions,
5510 // which is too many.
5511 } else {
5512 // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
5513 // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
5514 can_move_conditionally = true;
5515 use_const_for_true_in = is_true_value_zero_constant;
5516 }
5517 break;
5518 case Primitive::kPrimFloat:
5519 case Primitive::kPrimDouble:
5520 // Moving float/double on int condition.
5521 if (is_r6) {
5522 if (materialized) {
5523 // Not materializing unmaterialized int conditions
5524 // to keep the instruction count low.
5525 can_move_conditionally = true;
5526 if (is_true_value_zero_constant) {
5527 // sltu TMP, ZERO, cond_reg
5528 // mtc1 TMP, temp_cond_reg
5529 // seleqz.fmt out_reg, false_reg, temp_cond_reg
5530 use_const_for_true_in = true;
5531 } else if (is_false_value_zero_constant) {
5532 // sltu TMP, ZERO, cond_reg
5533 // mtc1 TMP, temp_cond_reg
5534 // selnez.fmt out_reg, true_reg, temp_cond_reg
5535 use_const_for_false_in = true;
5536 } else {
5537 // sltu TMP, ZERO, cond_reg
5538 // mtc1 TMP, temp_cond_reg
5539 // sel.fmt temp_cond_reg, false_reg, true_reg
5540 // mov.fmt out_reg, temp_cond_reg
5541 }
5542 }
5543 } else {
5544 // movn.fmt out_reg, true_reg, cond_reg
5545 can_move_conditionally = true;
5546 }
5547 break;
5548 }
5549 break;
5550 case Primitive::kPrimLong:
5551 // We don't materialize long comparison now
5552 // and use conditional branches instead.
5553 break;
5554 case Primitive::kPrimFloat:
5555 case Primitive::kPrimDouble:
5556 switch (dst_type) {
5557 default:
5558 // Moving int on float/double condition.
5559 if (is_r6) {
5560 if (is_true_value_zero_constant) {
5561 // mfc1 TMP, temp_cond_reg
5562 // seleqz out_reg, false_reg, TMP
5563 can_move_conditionally = true;
5564 use_const_for_true_in = true;
5565 } else if (is_false_value_zero_constant) {
5566 // mfc1 TMP, temp_cond_reg
5567 // selnez out_reg, true_reg, TMP
5568 can_move_conditionally = true;
5569 use_const_for_false_in = true;
5570 } else {
5571 // mfc1 TMP, temp_cond_reg
5572 // selnez AT, true_reg, TMP
5573 // seleqz TMP, false_reg, TMP
5574 // or out_reg, AT, TMP
5575 can_move_conditionally = true;
5576 }
5577 } else {
5578 // movt out_reg, true_reg/ZERO, cc
5579 can_move_conditionally = true;
5580 use_const_for_true_in = is_true_value_zero_constant;
5581 }
5582 break;
5583 case Primitive::kPrimLong:
5584 // Moving long on float/double condition.
5585 if (is_r6) {
5586 if (is_true_value_zero_constant) {
5587 // mfc1 TMP, temp_cond_reg
5588 // seleqz out_reg_lo, false_reg_lo, TMP
5589 // seleqz out_reg_hi, false_reg_hi, TMP
5590 can_move_conditionally = true;
5591 use_const_for_true_in = true;
5592 } else if (is_false_value_zero_constant) {
5593 // mfc1 TMP, temp_cond_reg
5594 // selnez out_reg_lo, true_reg_lo, TMP
5595 // selnez out_reg_hi, true_reg_hi, TMP
5596 can_move_conditionally = true;
5597 use_const_for_false_in = true;
5598 }
5599 // Other long conditional moves would generate 6+ instructions,
5600 // which is too many.
5601 } else {
5602 // movt out_reg_lo, true_reg_lo/ZERO, cc
5603 // movt out_reg_hi, true_reg_hi/ZERO, cc
5604 can_move_conditionally = true;
5605 use_const_for_true_in = is_true_value_zero_constant;
5606 }
5607 break;
5608 case Primitive::kPrimFloat:
5609 case Primitive::kPrimDouble:
5610 // Moving float/double on float/double condition.
5611 if (is_r6) {
5612 can_move_conditionally = true;
5613 if (is_true_value_zero_constant) {
5614 // seleqz.fmt out_reg, false_reg, temp_cond_reg
5615 use_const_for_true_in = true;
5616 } else if (is_false_value_zero_constant) {
5617 // selnez.fmt out_reg, true_reg, temp_cond_reg
5618 use_const_for_false_in = true;
5619 } else {
5620 // sel.fmt temp_cond_reg, false_reg, true_reg
5621 // mov.fmt out_reg, temp_cond_reg
5622 }
5623 } else {
5624 // movt.fmt out_reg, true_reg, cc
5625 can_move_conditionally = true;
5626 }
5627 break;
5628 }
5629 break;
5630 }
5631 }
5632
5633 if (can_move_conditionally) {
5634 DCHECK(!use_const_for_false_in || !use_const_for_true_in);
5635 } else {
5636 DCHECK(!use_const_for_false_in);
5637 DCHECK(!use_const_for_true_in);
5638 }
5639
5640 if (locations_to_set != nullptr) {
5641 if (use_const_for_false_in) {
5642 locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
5643 } else {
5644 locations_to_set->SetInAt(0,
5645 Primitive::IsFloatingPointType(dst_type)
5646 ? Location::RequiresFpuRegister()
5647 : Location::RequiresRegister());
5648 }
5649 if (use_const_for_true_in) {
5650 locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
5651 } else {
5652 locations_to_set->SetInAt(1,
5653 Primitive::IsFloatingPointType(dst_type)
5654 ? Location::RequiresFpuRegister()
5655 : Location::RequiresRegister());
5656 }
5657 if (materialized) {
5658 locations_to_set->SetInAt(2, Location::RequiresRegister());
5659 }
5660 // Unlike on R2, on R6 we don't require the output to be the same as
5661 // the first input for conditional moves.
5662 bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
5663 if (is_out_same_as_first_in) {
5664 locations_to_set->SetOut(Location::SameAsFirstInput());
5665 } else {
5666 locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
5667 ? Location::RequiresFpuRegister()
5668 : Location::RequiresRegister());
5669 }
5670 }
5671
5672 return can_move_conditionally;
5673}
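
// Worked example (a sketch; the register names are placeholders): for an R6
// integer HSelect whose false value is the constant 0 and whose condition is
// a materialized boolean, the logic above sets can_move_conditionally and
// use_const_for_false_in, so the false input is given a constant location and
// VisitSelect() can emit a single
//   selnez out_reg, true_reg, cond_reg
// instead of a compare-branch-move sequence.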
5674
5675void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
5676 LocationSummary* locations = select->GetLocations();
5677 Location dst = locations->Out();
5678 Location src = locations->InAt(1);
5679 Register src_reg = ZERO;
5680 Register src_reg_high = ZERO;
5681 HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
5682 Register cond_reg = TMP;
5683 int cond_cc = 0;
5684 Primitive::Type cond_type = Primitive::kPrimInt;
5685 bool cond_inverted = false;
5686 Primitive::Type dst_type = select->GetType();
5687
5688 if (IsBooleanValueOrMaterializedCondition(cond)) {
5689 cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
5690 } else {
5691 HCondition* condition = cond->AsCondition();
5692 LocationSummary* cond_locations = cond->GetLocations();
5693 IfCondition if_cond = condition->GetCondition();
5694 cond_type = condition->InputAt(0)->GetType();
5695 switch (cond_type) {
5696 default:
5697 DCHECK_NE(cond_type, Primitive::kPrimLong);
5698 cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
5699 break;
5700 case Primitive::kPrimFloat:
5701 case Primitive::kPrimDouble:
5702 cond_inverted = MaterializeFpCompareR2(if_cond,
5703 condition->IsGtBias(),
5704 cond_type,
5705 cond_locations,
5706 cond_cc);
5707 break;
5708 }
5709 }
5710
5711 DCHECK(dst.Equals(locations->InAt(0)));
5712 if (src.IsRegister()) {
5713 src_reg = src.AsRegister<Register>();
5714 } else if (src.IsRegisterPair()) {
5715 src_reg = src.AsRegisterPairLow<Register>();
5716 src_reg_high = src.AsRegisterPairHigh<Register>();
5717 } else if (src.IsConstant()) {
5718 DCHECK(src.GetConstant()->IsZeroBitPattern());
5719 }
5720
5721 switch (cond_type) {
5722 default:
5723 switch (dst_type) {
5724 default:
5725 if (cond_inverted) {
5726 __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
5727 } else {
5728 __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
5729 }
5730 break;
5731 case Primitive::kPrimLong:
5732 if (cond_inverted) {
5733 __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
5734 __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
5735 } else {
5736 __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
5737 __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
5738 }
5739 break;
5740 case Primitive::kPrimFloat:
5741 if (cond_inverted) {
5742 __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5743 } else {
5744 __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5745 }
5746 break;
5747 case Primitive::kPrimDouble:
5748 if (cond_inverted) {
5749 __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5750 } else {
5751 __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5752 }
5753 break;
5754 }
5755 break;
5756 case Primitive::kPrimLong:
5757 LOG(FATAL) << "Unreachable";
5758 UNREACHABLE();
5759 case Primitive::kPrimFloat:
5760 case Primitive::kPrimDouble:
5761 switch (dst_type) {
5762 default:
5763 if (cond_inverted) {
5764 __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
5765 } else {
5766 __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
5767 }
5768 break;
5769 case Primitive::kPrimLong:
5770 if (cond_inverted) {
5771 __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
5772 __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
5773 } else {
5774 __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
5775 __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
5776 }
5777 break;
5778 case Primitive::kPrimFloat:
5779 if (cond_inverted) {
5780 __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5781 } else {
5782 __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5783 }
5784 break;
5785 case Primitive::kPrimDouble:
5786 if (cond_inverted) {
5787 __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5788 } else {
5789 __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5790 }
5791 break;
5792 }
5793 break;
5794 }
5795}
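
// Illustrative shape of the R2 output (assuming an int destination and a
// materialized int condition; register names are placeholders): the output
// register is the same as the first (false) input, so a single
//   movn out_reg, true_reg, cond_reg
// leaves the false value in place when the condition is zero, or
//   movz out_reg, true_reg, cond_reg
// when materializing the comparison produced an inverted condition.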
5796
5797void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
5798 LocationSummary* locations = select->GetLocations();
5799 Location dst = locations->Out();
5800 Location false_src = locations->InAt(0);
5801 Location true_src = locations->InAt(1);
5802 HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
5803 Register cond_reg = TMP;
5804 FRegister fcond_reg = FTMP;
5805 Primitive::Type cond_type = Primitive::kPrimInt;
5806 bool cond_inverted = false;
5807 Primitive::Type dst_type = select->GetType();
5808
5809 if (IsBooleanValueOrMaterializedCondition(cond)) {
5810 cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
5811 } else {
5812 HCondition* condition = cond->AsCondition();
5813 LocationSummary* cond_locations = cond->GetLocations();
5814 IfCondition if_cond = condition->GetCondition();
5815 cond_type = condition->InputAt(0)->GetType();
5816 switch (cond_type) {
5817 default:
5818 DCHECK_NE(cond_type, Primitive::kPrimLong);
5819 cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
5820 break;
5821 case Primitive::kPrimFloat:
5822 case Primitive::kPrimDouble:
5823 cond_inverted = MaterializeFpCompareR6(if_cond,
5824 condition->IsGtBias(),
5825 cond_type,
5826 cond_locations,
5827 fcond_reg);
5828 break;
5829 }
5830 }
5831
5832 if (true_src.IsConstant()) {
5833 DCHECK(true_src.GetConstant()->IsZeroBitPattern());
5834 }
5835 if (false_src.IsConstant()) {
5836 DCHECK(false_src.GetConstant()->IsZeroBitPattern());
5837 }
5838
5839 switch (dst_type) {
5840 default:
5841 if (Primitive::IsFloatingPointType(cond_type)) {
5842 __ Mfc1(cond_reg, fcond_reg);
5843 }
5844 if (true_src.IsConstant()) {
5845 if (cond_inverted) {
5846 __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
5847 } else {
5848 __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
5849 }
5850 } else if (false_src.IsConstant()) {
5851 if (cond_inverted) {
5852 __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
5853 } else {
5854 __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
5855 }
5856 } else {
5857 DCHECK_NE(cond_reg, AT);
5858 if (cond_inverted) {
5859 __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
5860 __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
5861 } else {
5862 __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
5863 __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
5864 }
5865 __ Or(dst.AsRegister<Register>(), AT, TMP);
5866 }
5867 break;
5868 case Primitive::kPrimLong: {
5869 if (Primitive::IsFloatingPointType(cond_type)) {
5870 __ Mfc1(cond_reg, fcond_reg);
5871 }
5872 Register dst_lo = dst.AsRegisterPairLow<Register>();
5873 Register dst_hi = dst.AsRegisterPairHigh<Register>();
5874 if (true_src.IsConstant()) {
5875 Register src_lo = false_src.AsRegisterPairLow<Register>();
5876 Register src_hi = false_src.AsRegisterPairHigh<Register>();
5877 if (cond_inverted) {
5878 __ Selnez(dst_lo, src_lo, cond_reg);
5879 __ Selnez(dst_hi, src_hi, cond_reg);
5880 } else {
5881 __ Seleqz(dst_lo, src_lo, cond_reg);
5882 __ Seleqz(dst_hi, src_hi, cond_reg);
5883 }
5884 } else {
5885 DCHECK(false_src.IsConstant());
5886 Register src_lo = true_src.AsRegisterPairLow<Register>();
5887 Register src_hi = true_src.AsRegisterPairHigh<Register>();
5888 if (cond_inverted) {
5889 __ Seleqz(dst_lo, src_lo, cond_reg);
5890 __ Seleqz(dst_hi, src_hi, cond_reg);
5891 } else {
5892 __ Selnez(dst_lo, src_lo, cond_reg);
5893 __ Selnez(dst_hi, src_hi, cond_reg);
5894 }
5895 }
5896 break;
5897 }
5898 case Primitive::kPrimFloat: {
5899 if (!Primitive::IsFloatingPointType(cond_type)) {
5900 // sel*.fmt tests bit 0 of the condition register; account for that.
5901 __ Sltu(TMP, ZERO, cond_reg);
5902 __ Mtc1(TMP, fcond_reg);
5903 }
5904 FRegister dst_reg = dst.AsFpuRegister<FRegister>();
5905 if (true_src.IsConstant()) {
5906 FRegister src_reg = false_src.AsFpuRegister<FRegister>();
5907 if (cond_inverted) {
5908 __ SelnezS(dst_reg, src_reg, fcond_reg);
5909 } else {
5910 __ SeleqzS(dst_reg, src_reg, fcond_reg);
5911 }
5912 } else if (false_src.IsConstant()) {
5913 FRegister src_reg = true_src.AsFpuRegister<FRegister>();
5914 if (cond_inverted) {
5915 __ SeleqzS(dst_reg, src_reg, fcond_reg);
5916 } else {
5917 __ SelnezS(dst_reg, src_reg, fcond_reg);
5918 }
5919 } else {
5920 if (cond_inverted) {
5921 __ SelS(fcond_reg,
5922 true_src.AsFpuRegister<FRegister>(),
5923 false_src.AsFpuRegister<FRegister>());
5924 } else {
5925 __ SelS(fcond_reg,
5926 false_src.AsFpuRegister<FRegister>(),
5927 true_src.AsFpuRegister<FRegister>());
5928 }
5929 __ MovS(dst_reg, fcond_reg);
5930 }
5931 break;
5932 }
5933 case Primitive::kPrimDouble: {
5934 if (!Primitive::IsFloatingPointType(cond_type)) {
5935 // sel*.fmt tests bit 0 of the condition register; account for that.
5936 __ Sltu(TMP, ZERO, cond_reg);
5937 __ Mtc1(TMP, fcond_reg);
5938 }
5939 FRegister dst_reg = dst.AsFpuRegister<FRegister>();
5940 if (true_src.IsConstant()) {
5941 FRegister src_reg = false_src.AsFpuRegister<FRegister>();
5942 if (cond_inverted) {
5943 __ SelnezD(dst_reg, src_reg, fcond_reg);
5944 } else {
5945 __ SeleqzD(dst_reg, src_reg, fcond_reg);
5946 }
5947 } else if (false_src.IsConstant()) {
5948 FRegister src_reg = true_src.AsFpuRegister<FRegister>();
5949 if (cond_inverted) {
5950 __ SeleqzD(dst_reg, src_reg, fcond_reg);
5951 } else {
5952 __ SelnezD(dst_reg, src_reg, fcond_reg);
5953 }
5954 } else {
5955 if (cond_inverted) {
5956 __ SelD(fcond_reg,
5957 true_src.AsFpuRegister<FRegister>(),
5958 false_src.AsFpuRegister<FRegister>());
5959 } else {
5960 __ SelD(fcond_reg,
5961 false_src.AsFpuRegister<FRegister>(),
5962 true_src.AsFpuRegister<FRegister>());
5963 }
5964 __ MovD(dst_reg, fcond_reg);
5965 }
5966 break;
5967 }
5968 }
5969}
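
// Illustrative shape of the R6 output for a float select on a materialized
// int condition, with neither input a zero constant (register names are
// placeholders):
//   sltu  TMP, ZERO, cond_reg   // Normalize the condition into bit 0.
//   mtc1  TMP, FTMP
//   sel.s FTMP, false_reg, true_reg
//   mov.s out_reg, FTMP
// When one of the inputs is the zero constant, seleqz.s/selnez.s on the other
// input is emitted instead.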
5970
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005971void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5972 LocationSummary* locations = new (GetGraph()->GetArena())
5973 LocationSummary(flag, LocationSummary::kNoCall);
5974 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07005975}
5976
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005977void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5978 __ LoadFromOffset(kLoadWord,
5979 flag->GetLocations()->Out().AsRegister<Register>(),
5980 SP,
5981 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07005982}
5983
David Brazdil74eb1b22015-12-14 11:44:01 +00005984void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
5985 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005986 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00005987}
5988
5989void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005990 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
5991 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
5992 if (is_r6) {
5993 GenConditionalMoveR6(select);
5994 } else {
5995 GenConditionalMoveR2(select);
5996 }
5997 } else {
5998 LocationSummary* locations = select->GetLocations();
5999 MipsLabel false_target;
6000 GenerateTestAndBranch(select,
6001 /* condition_input_index */ 2,
6002 /* true_target */ nullptr,
6003 &false_target);
6004 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
6005 __ Bind(&false_target);
6006 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006007}
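
// When CanMoveConditionally() rejects the select (e.g. one whose condition is
// a long comparison), the fallback above lowers HSelect to a branch and a
// move, roughly (labels are illustrative):
//   <branch to false_label when the condition is false>
//   <move the true value (input 1) into the output>
// false_label:
// The output was constrained to the same location as the false value
// (input 0), so nothing needs to be moved on the branch-taken path.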
6008
David Srbecky0cf44932015-12-09 14:09:59 +00006009void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
6010 new (GetGraph()->GetArena()) LocationSummary(info);
6011}
6012
David Srbeckyd28f4a02016-03-14 17:14:24 +00006013void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
6014 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00006015}
6016
6017void CodeGeneratorMIPS::GenerateNop() {
6018 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00006019}
6020
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006021void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
6022 Primitive::Type field_type = field_info.GetFieldType();
6023 bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
6024 bool generate_volatile = field_info.IsVolatile() && is_wide;
Alexey Frunze15958152017-02-09 19:08:30 -08006025 bool object_field_get_with_read_barrier =
6026 kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006027 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Alexey Frunze15958152017-02-09 19:08:30 -08006028 instruction,
6029 generate_volatile
6030 ? LocationSummary::kCallOnMainOnly
6031 : (object_field_get_with_read_barrier
6032 ? LocationSummary::kCallOnSlowPath
6033 : LocationSummary::kNoCall));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006034
Alexey Frunzec61c0762017-04-10 13:54:23 -07006035 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
6036 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
6037 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006038 locations->SetInAt(0, Location::RequiresRegister());
6039 if (generate_volatile) {
6040 InvokeRuntimeCallingConvention calling_convention;
6041 // Need A0 to hold base + offset.
6042 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6043 if (field_type == Primitive::kPrimLong) {
6044 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
6045 } else {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006046 // Use Location::Any() to avoid problems when we run out of available FP registers.
6047 locations->SetOut(Location::Any());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006048 // Need some temp core regs since FP results are returned in core registers
6049 Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
6050 locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
6051 locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
6052 }
6053 } else {
6054 if (Primitive::IsFloatingPointType(instruction->GetType())) {
6055 locations->SetOut(Location::RequiresFpuRegister());
6056 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006057 // The output overlaps in the case of an object field get with
6058 // read barriers enabled: we do not want the move to overwrite the
6059 // object's location, as we need it to emit the read barrier.
6060 locations->SetOut(Location::RequiresRegister(),
6061 object_field_get_with_read_barrier
6062 ? Location::kOutputOverlap
6063 : Location::kNoOutputOverlap);
6064 }
6065 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
6066 // We need a temporary register for the read barrier marking slow
6067 // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
6068 locations->AddTemp(Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006069 }
6070 }
6071}
6072
6073void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
6074 const FieldInfo& field_info,
6075 uint32_t dex_pc) {
6076 Primitive::Type type = field_info.GetFieldType();
6077 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08006078 Location obj_loc = locations->InAt(0);
6079 Register obj = obj_loc.AsRegister<Register>();
6080 Location dst_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006081 LoadOperandType load_type = kLoadUnsignedByte;
6082 bool is_volatile = field_info.IsVolatile();
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006083 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Tijana Jakovljevic57433862017-01-17 16:59:03 +01006084 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006085
6086 switch (type) {
6087 case Primitive::kPrimBoolean:
6088 load_type = kLoadUnsignedByte;
6089 break;
6090 case Primitive::kPrimByte:
6091 load_type = kLoadSignedByte;
6092 break;
6093 case Primitive::kPrimShort:
6094 load_type = kLoadSignedHalfword;
6095 break;
6096 case Primitive::kPrimChar:
6097 load_type = kLoadUnsignedHalfword;
6098 break;
6099 case Primitive::kPrimInt:
6100 case Primitive::kPrimFloat:
6101 case Primitive::kPrimNot:
6102 load_type = kLoadWord;
6103 break;
6104 case Primitive::kPrimLong:
6105 case Primitive::kPrimDouble:
6106 load_type = kLoadDoubleword;
6107 break;
6108 case Primitive::kPrimVoid:
6109 LOG(FATAL) << "Unreachable type " << type;
6110 UNREACHABLE();
6111 }
6112
6113 if (is_volatile && load_type == kLoadDoubleword) {
6114 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006115 __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006116 // Do implicit null check.
6117 __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
6118 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
Serban Constantinescufca16662016-07-14 09:21:59 +01006119 codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006120 CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
6121 if (type == Primitive::kPrimDouble) {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006122 // FP results are returned in core registers. Need to move them.
Alexey Frunze15958152017-02-09 19:08:30 -08006123 if (dst_loc.IsFpuRegister()) {
6124 __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006125 __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
Alexey Frunze15958152017-02-09 19:08:30 -08006126 dst_loc.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006127 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006128 DCHECK(dst_loc.IsDoubleStackSlot());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006129 __ StoreToOffset(kStoreWord,
6130 locations->GetTemp(1).AsRegister<Register>(),
6131 SP,
Alexey Frunze15958152017-02-09 19:08:30 -08006132 dst_loc.GetStackIndex());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006133 __ StoreToOffset(kStoreWord,
6134 locations->GetTemp(2).AsRegister<Register>(),
6135 SP,
Alexey Frunze15958152017-02-09 19:08:30 -08006136 dst_loc.GetStackIndex() + 4);
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006137 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006138 }
6139 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006140 if (type == Primitive::kPrimNot) {
6141 // /* HeapReference<Object> */ dst = *(obj + offset)
6142 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
6143 Location temp_loc = locations->GetTemp(0);
6144 // Note that a potential implicit null check is handled in this
6145 // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
6146 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6147 dst_loc,
6148 obj,
6149 offset,
6150 temp_loc,
6151 /* needs_null_check */ true);
6152 if (is_volatile) {
6153 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6154 }
6155 } else {
6156 __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
6157 if (is_volatile) {
6158 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6159 }
6160 // If read barriers are enabled, emit read barriers other than
6161 // Baker's using a slow path (and also unpoison the loaded
6162 // reference, if heap poisoning is enabled).
6163 codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
6164 }
6165 } else if (!Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006166 Register dst;
6167 if (type == Primitive::kPrimLong) {
Alexey Frunze15958152017-02-09 19:08:30 -08006168 DCHECK(dst_loc.IsRegisterPair());
6169 dst = dst_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006170 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006171 DCHECK(dst_loc.IsRegister());
6172 dst = dst_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006173 }
Alexey Frunze2923db72016-08-20 01:55:47 -07006174 __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006175 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006176 DCHECK(dst_loc.IsFpuRegister());
6177 FRegister dst = dst_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006178 if (type == Primitive::kPrimFloat) {
Alexey Frunze2923db72016-08-20 01:55:47 -07006179 __ LoadSFromOffset(dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006180 } else {
Alexey Frunze2923db72016-08-20 01:55:47 -07006181 __ LoadDFromOffset(dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006182 }
6183 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006184 }
6185
Alexey Frunze15958152017-02-09 19:08:30 -08006186 // Memory barriers for volatile reference loads are emitted above, right
6187 // after the reference is loaded.
6188 if (is_volatile && (type != Primitive::kPrimNot)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006189 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6190 }
6191}
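
// Rough shape of the volatile 64-bit load path above for a double field whose
// output landed in an FPU register (a sketch; the quick entrypoint calling
// sequence itself is omitted):
//   addiu a0, obj, offset          // Addiu32 in the general case.
//   lw    zero, 0(a0)              // Implicit null check.
//   <call kQuickA64Load>           // Result comes back in v0/v1.
//   mtc1  v0, out_fpu              // Low word.
//   <MoveToFpuHigh v1, out_fpu>    // High word (mthc1 or odd-register mtc1).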
6192
6193void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
6194 Primitive::Type field_type = field_info.GetFieldType();
6195 bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
6196 bool generate_volatile = field_info.IsVolatile() && is_wide;
6197 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006198 instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006199
6200 locations->SetInAt(0, Location::RequiresRegister());
6201 if (generate_volatile) {
6202 InvokeRuntimeCallingConvention calling_convention;
6203 // Need A0 to hold base + offset.
6204 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6205 if (field_type == Primitive::kPrimLong) {
6206 locations->SetInAt(1, Location::RegisterPairLocation(
6207 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
6208 } else {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006209 // Use Location::Any() to avoid problems when we run out of available FP registers.
6210 locations->SetInAt(1, Location::Any());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006211 // Pass FP parameters in core registers.
6212 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
6213 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
6214 }
6215 } else {
6216 if (Primitive::IsFloatingPointType(field_type)) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006217 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006218 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006219 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006220 }
6221 }
6222}
6223
6224void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
6225 const FieldInfo& field_info,
Goran Jakovljevice114da22016-12-26 14:21:43 +01006226 uint32_t dex_pc,
6227 bool value_can_be_null) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006228 Primitive::Type type = field_info.GetFieldType();
6229 LocationSummary* locations = instruction->GetLocations();
6230 Register obj = locations->InAt(0).AsRegister<Register>();
Alexey Frunzef58b2482016-09-02 22:14:06 -07006231 Location value_location = locations->InAt(1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006232 StoreOperandType store_type = kStoreByte;
6233 bool is_volatile = field_info.IsVolatile();
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006234 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Alexey Frunzec061de12017-02-14 13:27:23 -08006235 bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
Tijana Jakovljevic57433862017-01-17 16:59:03 +01006236 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006237
6238 switch (type) {
6239 case Primitive::kPrimBoolean:
6240 case Primitive::kPrimByte:
6241 store_type = kStoreByte;
6242 break;
6243 case Primitive::kPrimShort:
6244 case Primitive::kPrimChar:
6245 store_type = kStoreHalfword;
6246 break;
6247 case Primitive::kPrimInt:
6248 case Primitive::kPrimFloat:
6249 case Primitive::kPrimNot:
6250 store_type = kStoreWord;
6251 break;
6252 case Primitive::kPrimLong:
6253 case Primitive::kPrimDouble:
6254 store_type = kStoreDoubleword;
6255 break;
6256 case Primitive::kPrimVoid:
6257 LOG(FATAL) << "Unreachable type " << type;
6258 UNREACHABLE();
6259 }
6260
6261 if (is_volatile) {
6262 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
6263 }
6264
6265 if (is_volatile && store_type == kStoreDoubleword) {
6266 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006267 __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006268 // Do implicit null check.
6269 __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
6270 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
6271 if (type == Primitive::kPrimDouble) {
6272 // Pass FP parameters in core registers.
Alexey Frunzef58b2482016-09-02 22:14:06 -07006273 if (value_location.IsFpuRegister()) {
6274 __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
6275 value_location.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006276 __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
Alexey Frunzef58b2482016-09-02 22:14:06 -07006277 value_location.AsFpuRegister<FRegister>());
6278 } else if (value_location.IsDoubleStackSlot()) {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006279 __ LoadFromOffset(kLoadWord,
6280 locations->GetTemp(1).AsRegister<Register>(),
6281 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006282 value_location.GetStackIndex());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006283 __ LoadFromOffset(kLoadWord,
6284 locations->GetTemp(2).AsRegister<Register>(),
6285 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006286 value_location.GetStackIndex() + 4);
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006287 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006288 DCHECK(value_location.IsConstant());
6289 DCHECK(value_location.GetConstant()->IsDoubleConstant());
6290 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006291 __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
6292 locations->GetTemp(1).AsRegister<Register>(),
6293 value);
6294 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006295 }
Serban Constantinescufca16662016-07-14 09:21:59 +01006296 codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006297 CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
6298 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006299 if (value_location.IsConstant()) {
6300 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
6301 __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
6302 } else if (!Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006303 Register src;
6304 if (type == Primitive::kPrimLong) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006305 src = value_location.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006306 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006307 src = value_location.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006308 }
Alexey Frunzec061de12017-02-14 13:27:23 -08006309 if (kPoisonHeapReferences && needs_write_barrier) {
6310 // Note that in the case where `value` is a null reference,
6311 // we do not enter this block, as a null reference does not
6312 // need poisoning.
6313 DCHECK_EQ(type, Primitive::kPrimNot);
6314 __ PoisonHeapReference(TMP, src);
6315 __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
6316 } else {
6317 __ StoreToOffset(store_type, src, obj, offset, null_checker);
6318 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006319 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006320 FRegister src = value_location.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006321 if (type == Primitive::kPrimFloat) {
Alexey Frunze2923db72016-08-20 01:55:47 -07006322 __ StoreSToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006323 } else {
Alexey Frunze2923db72016-08-20 01:55:47 -07006324 __ StoreDToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006325 }
6326 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006327 }
6328
Alexey Frunzec061de12017-02-14 13:27:23 -08006329 if (needs_write_barrier) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006330 Register src = value_location.AsRegister<Register>();
Goran Jakovljevice114da22016-12-26 14:21:43 +01006331 codegen_->MarkGCCard(obj, src, value_can_be_null);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006332 }
6333
6334 if (is_volatile) {
6335 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
6336 }
6337}
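
// Rough shape of a non-volatile reference field store with heap poisoning
// enabled (a sketch; the poisoning encoding itself is an implementation
// detail of the assembler):
//   <PoisonHeapReference TMP, value_reg>
//   sw TMP, offset(obj)            // With the implicit null check attached.
//   <MarkGCCard(obj, value_reg)>   // Card marking for the GC write barrier.
// Without poisoning, value_reg is stored directly and only the card mark
// remains.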
6338
6339void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6340 HandleFieldGet(instruction, instruction->GetFieldInfo());
6341}
6342
6343void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6344 HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
6345}
6346
6347void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
6348 HandleFieldSet(instruction, instruction->GetFieldInfo());
6349}
6350
6351void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01006352 HandleFieldSet(instruction,
6353 instruction->GetFieldInfo(),
6354 instruction->GetDexPc(),
6355 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006356}
6357
Alexey Frunze15958152017-02-09 19:08:30 -08006358void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
6359 HInstruction* instruction,
6360 Location out,
6361 uint32_t offset,
6362 Location maybe_temp,
6363 ReadBarrierOption read_barrier_option) {
6364 Register out_reg = out.AsRegister<Register>();
6365 if (read_barrier_option == kWithReadBarrier) {
6366 CHECK(kEmitCompilerReadBarrier);
6367 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6368 if (kUseBakerReadBarrier) {
6369 // Load with fast path based Baker's read barrier.
6370 // /* HeapReference<Object> */ out = *(out + offset)
6371 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6372 out,
6373 out_reg,
6374 offset,
6375 maybe_temp,
6376 /* needs_null_check */ false);
6377 } else {
6378 // Load with slow path based read barrier.
6379 // Save the value of `out` into `maybe_temp` before overwriting it
6380 // in the following move operation, as we will need it for the
6381 // read barrier below.
6382 __ Move(maybe_temp.AsRegister<Register>(), out_reg);
6383 // /* HeapReference<Object> */ out = *(out + offset)
6384 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6385 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6386 }
6387 } else {
6388 // Plain load with no read barrier.
6389 // /* HeapReference<Object> */ out = *(out + offset)
6390 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6391 __ MaybeUnpoisonHeapReference(out_reg);
6392 }
6393}
6394
6395void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
6396 HInstruction* instruction,
6397 Location out,
6398 Location obj,
6399 uint32_t offset,
6400 Location maybe_temp,
6401 ReadBarrierOption read_barrier_option) {
6402 Register out_reg = out.AsRegister<Register>();
6403 Register obj_reg = obj.AsRegister<Register>();
6404 if (read_barrier_option == kWithReadBarrier) {
6405 CHECK(kEmitCompilerReadBarrier);
6406 if (kUseBakerReadBarrier) {
6407 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6408 // Load with fast path based Baker's read barrier.
6409 // /* HeapReference<Object> */ out = *(obj + offset)
6410 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6411 out,
6412 obj_reg,
6413 offset,
6414 maybe_temp,
6415 /* needs_null_check */ false);
6416 } else {
6417 // Load with slow path based read barrier.
6418 // /* HeapReference<Object> */ out = *(obj + offset)
6419 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6420 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6421 }
6422 } else {
6423 // Plain load with no read barrier.
6424 // /* HeapReference<Object> */ out = *(obj + offset)
6425 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6426 __ MaybeUnpoisonHeapReference(out_reg);
6427 }
6428}
6429
6430void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
6431 Location root,
6432 Register obj,
6433 uint32_t offset,
6434 ReadBarrierOption read_barrier_option) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07006435 Register root_reg = root.AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08006436 if (read_barrier_option == kWithReadBarrier) {
6437 DCHECK(kEmitCompilerReadBarrier);
6438 if (kUseBakerReadBarrier) {
6439 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6440 // Baker's read barriers are used:
6441 //
6442 // root = obj.field;
6443 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6444 // if (temp != null) {
6445 // root = temp(root)
6446 // }
6447
6448 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6449 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6450 static_assert(
6451 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6452 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6453 "have different sizes.");
6454 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6455 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6456 "have different sizes.");
6457
6458 // Slow path marking the GC root `root`.
6459 Location temp = Location::RegisterLocation(T9);
6460 SlowPathCodeMIPS* slow_path =
6461 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
6462 instruction,
6463 root,
6464 /*entrypoint*/ temp);
6465 codegen_->AddSlowPath(slow_path);
6466
6467 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6468 const int32_t entry_point_offset =
6469 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
6470 // Loading the entrypoint does not require a load acquire since it is only changed when
6471 // threads are suspended or running a checkpoint.
6472 __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
6473 // The entrypoint is null when the GC is not marking; this prevents one load compared to
6474 // checking GetIsGcMarking.
6475 __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
6476 __ Bind(slow_path->GetExitLabel());
6477 } else {
6478 // GC root loaded through a slow path for read barriers other
6479 // than Baker's.
6480 // /* GcRoot<mirror::Object>* */ root = obj + offset
6481 __ Addiu32(root_reg, obj, offset);
6482 // /* mirror::Object* */ root = root->Read()
6483 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6484 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07006485 } else {
6486 // Plain GC root load with no read barrier.
6487 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6488 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6489 // Note that GC roots are not affected by heap poisoning, thus we
6490 // do not have to unpoison `root_reg` here.
6491 }
6492}
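
// Rough shape of the Baker read barrier fast path above for a GC root load
// (a sketch; label names are illustrative):
//   lw   root_reg, offset(obj)
//   lw   t9, <pReadBarrierMarkReg ## root_reg>(TR)  // Per-register entrypoint.
//   bnez t9, mark_slow_path
// done:
// When the GC is not marking, the entrypoint slot holds null, so the only
// extra cost is one load and an untaken branch.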
6493
Alexey Frunze15958152017-02-09 19:08:30 -08006494void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6495 Location ref,
6496 Register obj,
6497 uint32_t offset,
6498 Location temp,
6499 bool needs_null_check) {
6500 DCHECK(kEmitCompilerReadBarrier);
6501 DCHECK(kUseBakerReadBarrier);
6502
6503 // /* HeapReference<Object> */ ref = *(obj + offset)
6504 Location no_index = Location::NoLocation();
6505 ScaleFactor no_scale_factor = TIMES_1;
6506 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6507 ref,
6508 obj,
6509 offset,
6510 no_index,
6511 no_scale_factor,
6512 temp,
6513 needs_null_check);
6514}
6515
6516void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6517 Location ref,
6518 Register obj,
6519 uint32_t data_offset,
6520 Location index,
6521 Location temp,
6522 bool needs_null_check) {
6523 DCHECK(kEmitCompilerReadBarrier);
6524 DCHECK(kUseBakerReadBarrier);
6525
6526 static_assert(
6527 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6528 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
6529 // /* HeapReference<Object> */ ref =
6530 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6531 ScaleFactor scale_factor = TIMES_4;
6532 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6533 ref,
6534 obj,
6535 data_offset,
6536 index,
6537 scale_factor,
6538 temp,
6539 needs_null_check);
6540}
6541
6542void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6543 Location ref,
6544 Register obj,
6545 uint32_t offset,
6546 Location index,
6547 ScaleFactor scale_factor,
6548 Location temp,
6549 bool needs_null_check,
6550 bool always_update_field) {
6551 DCHECK(kEmitCompilerReadBarrier);
6552 DCHECK(kUseBakerReadBarrier);
6553
6554 // In slow path based read barriers, the read barrier call is
6555 // inserted after the original load. However, in fast path based
6556 // Baker's read barriers, we need to perform the load of
6557 // mirror::Object::monitor_ *before* the original reference load.
6558 // This load-load ordering is required by the read barrier.
6559 // The fast path/slow path (for Baker's algorithm) should look like:
6560 //
6561 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6562 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6563 // HeapReference<Object> ref = *src; // Original reference load.
6564 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6565 // if (is_gray) {
6566 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6567 // }
6568 //
6569 // Note: the original implementation in ReadBarrier::Barrier is
6570 // slightly more complex as it performs additional checks that we do
6571 // not do here for performance reasons.
6572
6573 Register ref_reg = ref.AsRegister<Register>();
6574 Register temp_reg = temp.AsRegister<Register>();
6575 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6576
6577 // /* int32_t */ monitor = obj->monitor_
6578 __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
6579 if (needs_null_check) {
6580 MaybeRecordImplicitNullCheck(instruction);
6581 }
6582 // /* LockWord */ lock_word = LockWord(monitor)
6583 static_assert(sizeof(LockWord) == sizeof(int32_t),
6584 "art::LockWord and int32_t have different sizes.");
6585
6586 __ Sync(0); // Barrier to prevent load-load reordering.
6587
6588 // The actual reference load.
6589 if (index.IsValid()) {
6590 // Load types involving an "index": ArrayGet,
6591 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6592 // intrinsics.
6593 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
6594 if (index.IsConstant()) {
6595 size_t computed_offset =
6596 (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
6597 __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
6598 } else {
6599 // Handle the special case of the
6600 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6601 // intrinsics, which use a register pair as index ("long
6602 // offset"), of which only the low part contains data.
6603 Register index_reg = index.IsRegisterPair()
6604 ? index.AsRegisterPairLow<Register>()
6605 : index.AsRegister<Register>();
Chris Larsencd0295d2017-03-31 15:26:54 -07006606 __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08006607 __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
6608 }
6609 } else {
6610 // /* HeapReference<Object> */ ref = *(obj + offset)
6611 __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
6612 }
6613
6614 // Object* ref = ref_addr->AsMirrorPtr()
6615 __ MaybeUnpoisonHeapReference(ref_reg);
6616
6617 // Slow path marking the object `ref` when it is gray.
6618 SlowPathCodeMIPS* slow_path;
6619 if (always_update_field) {
6620 // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
6621 // of the form `obj + field_offset`, where `obj` is a register and
6622 // `field_offset` is a register pair (of which only the lower half
6623 // is used). Thus `offset` is expected to be zero and `scale_factor`
6624 // to be TIMES_1 in this code path.
6625 DCHECK_EQ(offset, 0u);
6626 DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
6627 slow_path = new (GetGraph()->GetArena())
6628 ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
6629 ref,
6630 obj,
6631 /* field_offset */ index,
6632 temp_reg);
6633 } else {
6634 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
6635 }
6636 AddSlowPath(slow_path);
6637
6638 // if (rb_state == ReadBarrier::GrayState())
6639 // ref = ReadBarrier::Mark(ref);
6640 // Given the numeric representation, it's enough to check the low bit of the
6641 // rb_state. We do that by shifting the bit into the sign bit (31) and
6642 // performing a branch on less than zero.
6643 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
6644 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
6645 static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
6646 __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
6647 __ Bltz(temp_reg, slow_path->GetEntryLabel());
6648 __ Bind(slow_path->GetExitLabel());
6649}
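
// Rough shape of the fast path emitted above for a plain field reference load
// (a sketch; offsets and labels are illustrative):
//   lw   temp, monitor_offset(obj)   // Lock word holds the rb_state bit.
//   sync 0                           // Keep the two loads ordered.
//   lw   ref, offset(obj)
//   <unpoison ref if heap poisoning is enabled>
//   sll  temp, temp, 31 - kReadBarrierStateShift
//   bltz temp, mark_slow_path        // Gray object => mark `ref`.
// done: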
6650
6651void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
6652 Location out,
6653 Location ref,
6654 Location obj,
6655 uint32_t offset,
6656 Location index) {
6657 DCHECK(kEmitCompilerReadBarrier);
6658
6659 // Insert a slow path based read barrier *after* the reference load.
6660 //
6661 // If heap poisoning is enabled, the unpoisoning of the loaded
6662 // reference will be carried out by the runtime within the slow
6663 // path.
6664 //
6665 // Note that `ref` currently does not get unpoisoned (when heap
6666 // poisoning is enabled), which is alright as the `ref` argument is
6667 // not used by the artReadBarrierSlow entry point.
6668 //
6669 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6670 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
6671 ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
6672 AddSlowPath(slow_path);
6673
6674 __ B(slow_path->GetEntryLabel());
6675 __ Bind(slow_path->GetExitLabel());
6676}
6677
6678void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6679 Location out,
6680 Location ref,
6681 Location obj,
6682 uint32_t offset,
6683 Location index) {
6684 if (kEmitCompilerReadBarrier) {
6685 // Baker's read barriers shall be handled by the fast path
6686 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
6687 DCHECK(!kUseBakerReadBarrier);
6688 // If heap poisoning is enabled, unpoisoning will be taken care of
6689 // by the runtime within the slow path.
6690 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
6691 } else if (kPoisonHeapReferences) {
6692 __ UnpoisonHeapReference(out.AsRegister<Register>());
6693 }
6694}
6695
6696void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6697 Location out,
6698 Location root) {
6699 DCHECK(kEmitCompilerReadBarrier);
6700
6701 // Insert a slow path based read barrier *after* the GC root load.
6702 //
6703 // Note that GC roots are not affected by heap poisoning, so we do
6704 // not need to do anything special for this here.
6705 SlowPathCodeMIPS* slow_path =
6706 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
6707 AddSlowPath(slow_path);
6708
6709 __ B(slow_path->GetEntryLabel());
6710 __ Bind(slow_path->GetExitLabel());
6711}
6712
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006713void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006714 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
6715 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07006716 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006717 switch (type_check_kind) {
6718 case TypeCheckKind::kExactCheck:
6719 case TypeCheckKind::kAbstractClassCheck:
6720 case TypeCheckKind::kClassHierarchyCheck:
6721 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08006722 call_kind =
6723 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006724 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006725 break;
6726 case TypeCheckKind::kArrayCheck:
6727 case TypeCheckKind::kUnresolvedCheck:
6728 case TypeCheckKind::kInterfaceCheck:
6729 call_kind = LocationSummary::kCallOnSlowPath;
6730 break;
6731 }
6732
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006733 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07006734 if (baker_read_barrier_slow_path) {
6735 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
6736 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006737 locations->SetInAt(0, Location::RequiresRegister());
6738 locations->SetInAt(1, Location::RequiresRegister());
6739 // The output does overlap the inputs.
6740 // Note that TypeCheckSlowPathMIPS uses this register too.
6741 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08006742 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006743}
6744
6745void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006746 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006747 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08006748 Location obj_loc = locations->InAt(0);
6749 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006750 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08006751 Location out_loc = locations->Out();
6752 Register out = out_loc.AsRegister<Register>();
6753 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6754 DCHECK_LE(num_temps, 1u);
6755 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006756 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6757 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6758 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6759 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006760 MipsLabel done;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006761 SlowPathCodeMIPS* slow_path = nullptr;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006762
6763 // Return 0 if `obj` is null.
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006764 // Avoid this check if we know `obj` is not null.
6765 if (instruction->MustDoNullCheck()) {
6766 __ Move(out, ZERO);
6767 __ Beqz(obj, &done);
6768 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006769
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006770 switch (type_check_kind) {
6771 case TypeCheckKind::kExactCheck: {
6772 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006773 GenerateReferenceLoadTwoRegisters(instruction,
6774 out_loc,
6775 obj_loc,
6776 class_offset,
6777 maybe_temp_loc,
6778 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006779 // Classes must be equal for the instanceof to succeed.
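      // The Xor leaves `out` equal to zero exactly when the two classes match, and the
      // Sltiu below turns that into a 0/1 result (unsigned `out < 1` holds only for zero).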
6780 __ Xor(out, out, cls);
6781 __ Sltiu(out, out, 1);
6782 break;
6783 }
6784
6785 case TypeCheckKind::kAbstractClassCheck: {
6786 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006787 GenerateReferenceLoadTwoRegisters(instruction,
6788 out_loc,
6789 obj_loc,
6790 class_offset,
6791 maybe_temp_loc,
6792 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006793 // If the class is abstract, we eagerly fetch the super class of the
6794 // object to avoid doing a comparison we know will fail.
6795 MipsLabel loop;
6796 __ Bind(&loop);
6797 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08006798 GenerateReferenceLoadOneRegister(instruction,
6799 out_loc,
6800 super_offset,
6801 maybe_temp_loc,
6802 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006803 // If `out` is null, we use it for the result, and jump to `done`.
6804 __ Beqz(out, &done);
6805 __ Bne(out, cls, &loop);
6806 __ LoadConst32(out, 1);
6807 break;
6808 }
6809
6810 case TypeCheckKind::kClassHierarchyCheck: {
6811 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006812 GenerateReferenceLoadTwoRegisters(instruction,
6813 out_loc,
6814 obj_loc,
6815 class_offset,
6816 maybe_temp_loc,
6817 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006818 // Walk over the class hierarchy to find a match.
6819 MipsLabel loop, success;
6820 __ Bind(&loop);
6821 __ Beq(out, cls, &success);
6822 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08006823 GenerateReferenceLoadOneRegister(instruction,
6824 out_loc,
6825 super_offset,
6826 maybe_temp_loc,
6827 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006828 __ Bnez(out, &loop);
6829 // If `out` is null, we use it for the result, and jump to `done`.
6830 __ B(&done);
6831 __ Bind(&success);
6832 __ LoadConst32(out, 1);
6833 break;
6834 }
6835
6836 case TypeCheckKind::kArrayObjectCheck: {
6837 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006838 GenerateReferenceLoadTwoRegisters(instruction,
6839 out_loc,
6840 obj_loc,
6841 class_offset,
6842 maybe_temp_loc,
6843 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006844 // Do an exact check.
6845 MipsLabel success;
6846 __ Beq(out, cls, &success);
6847 // Otherwise, we need to check that the object's class is a non-primitive array.
6848 // /* HeapReference<Class> */ out = out->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08006849 GenerateReferenceLoadOneRegister(instruction,
6850 out_loc,
6851 component_offset,
6852 maybe_temp_loc,
6853 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006854 // If `out` is null, we use it for the result, and jump to `done`.
6855 __ Beqz(out, &done);
6856 __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
6857 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
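      // A zero primitive type (kPrimNot) means the component is a reference, so the Sltiu
      // below produces 1 only for non-primitive (object) arrays.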
6858 __ Sltiu(out, out, 1);
6859 __ B(&done);
6860 __ Bind(&success);
6861 __ LoadConst32(out, 1);
6862 break;
6863 }
6864
6865 case TypeCheckKind::kArrayCheck: {
6866 // No read barrier since the slow path will retry upon failure.
6867 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006868 GenerateReferenceLoadTwoRegisters(instruction,
6869 out_loc,
6870 obj_loc,
6871 class_offset,
6872 maybe_temp_loc,
6873 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006874 DCHECK(locations->OnlyCallsOnSlowPath());
6875 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
6876 /* is_fatal */ false);
6877 codegen_->AddSlowPath(slow_path);
6878 __ Bne(out, cls, slow_path->GetEntryLabel());
6879 __ LoadConst32(out, 1);
6880 break;
6881 }
6882
6883 case TypeCheckKind::kUnresolvedCheck:
6884 case TypeCheckKind::kInterfaceCheck: {
6885 // Note that we indeed only call on slow path, but we always go
6886 // into the slow path for the unresolved and interface check
6887 // cases.
6888 //
6889 // We cannot directly call the InstanceofNonTrivial runtime
6890 // entry point without resorting to a type checking slow path
6891 // here (i.e. by calling InvokeRuntime directly), as it would
6892 // require assigning fixed registers for the inputs of this
6893 // HInstanceOf instruction (following the runtime calling
6894 // convention), which might be cluttered by the potential first
6895 // read barrier emission at the beginning of this method.
6896 //
6897 // TODO: Introduce a new runtime entry point taking the object
6898 // to test (instead of its class) as argument, and let it deal
6899 // with the read barrier issues. This will let us refactor this
6900 // case of the `switch` code as it was previously (with a direct
6901 // call to the runtime not using a type checking slow path).
6902 // This should also be beneficial for the other cases above.
6903 DCHECK(locations->OnlyCallsOnSlowPath());
6904 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
6905 /* is_fatal */ false);
6906 codegen_->AddSlowPath(slow_path);
6907 __ B(slow_path->GetEntryLabel());
6908 break;
6909 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006910 }
6911
6912 __ Bind(&done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006913
6914 if (slow_path != nullptr) {
6915 __ Bind(slow_path->GetExitLabel());
6916 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006917}
6918
6919void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
6920 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6921 locations->SetOut(Location::ConstantLocation(constant));
6922}
6923
6924void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
6925 // Will be generated at use site.
6926}
6927
6928void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
6929 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6930 locations->SetOut(Location::ConstantLocation(constant));
6931}
6932
6933void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
6934 // Will be generated at use site.
6935}
6936
6937void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
6938 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
6939 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
6940}
6941
6942void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
6943 HandleInvoke(invoke);
Alexey Frunze1b8464d2016-11-12 17:22:05 -08006944 // The register T7 is required to be used for the hidden argument in
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006945 // art_quick_imt_conflict_trampoline, so add the hidden argument.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08006946 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006947}
6948
6949void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
6950 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
6951 Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006952 Location receiver = invoke->GetLocations()->InAt(0);
6953 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07006954 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006955
6956 // Set the hidden argument.
6957 __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
6958 invoke->GetDexMethodIndex());
6959
6960 // temp = object->GetClass();
6961 if (receiver.IsStackSlot()) {
6962 __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
6963 __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
6964 } else {
6965 __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
6966 }
6967 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08006968 // Instead of simply (possibly) unpoisoning `temp` here, we should
6969 // emit a read barrier for the previous class reference load.
6970 // However this is not required in practice, as this is an
6971 // intermediate/temporary reference and because the current
6972 // concurrent copying collector keeps the from-space memory
6973 // intact/accessible until the end of the marking phase (the
6974 // concurrent copying collector may not do so in the future).
6975 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006976 __ LoadFromOffset(kLoadWord, temp, temp,
6977 mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
6978 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00006979 invoke->GetImtIndex(), kMipsPointerSize));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006980 // temp = temp->GetImtEntryAt(method_offset);
6981 __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
6982 // T9 = temp->GetEntryPoint();
6983 __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
6984 // T9();
6985 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07006986 __ NopIfNoReordering();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006987 DCHECK(!codegen_->IsLeafMethod());
6988 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
6989}
6990
6991void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07006992 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
6993 if (intrinsic.TryDispatch(invoke)) {
6994 return;
6995 }
6996
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006997 HandleInvoke(invoke);
6998}
6999
7000void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007001 // Explicit clinit checks triggered by static invokes must have been pruned by
7002 // art::PrepareForRegisterAllocation.
7003 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007004
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007005 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
Vladimir Marko65979462017-05-19 17:25:12 +01007006 bool has_extra_input = invoke->HasPcRelativeMethodLoadKind() && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007007
Chris Larsen701566a2015-10-27 15:29:13 -07007008 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7009 if (intrinsic.TryDispatch(invoke)) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007010 if (invoke->GetLocations()->CanCall() && has_extra_input) {
7011 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
7012 }
Chris Larsen701566a2015-10-27 15:29:13 -07007013 return;
7014 }
7015
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007016 HandleInvoke(invoke);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007017
7018 // Add the extra input register if either the dex cache array base register
7019 // or the PC-relative base register for accessing literals is needed.
7020 if (has_extra_input) {
7021 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
7022 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007023}
7024
Orion Hodsonac141392017-01-13 11:53:47 +00007025void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
7026 HandleInvoke(invoke);
7027}
7028
7029void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
7030 codegen_->GenerateInvokePolymorphicCall(invoke);
7031}
7032
Chris Larsen701566a2015-10-27 15:29:13 -07007033static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007034 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07007035 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
7036 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007037 return true;
7038 }
7039 return false;
7040}
7041
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007042HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
Alexey Frunze06a46c42016-07-19 15:00:40 -07007043 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007044 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007045 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007046 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
Vladimir Markoaad75c62016-10-03 08:46:48 +00007047 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007048 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007049 bool is_r6 = GetInstructionSetFeatures().IsR6();
7050 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007051 switch (desired_string_load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007052 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007053 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007054 DCHECK(!Runtime::Current()->UseJitCompilation());
Alexey Frunze06a46c42016-07-19 15:00:40 -07007055 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007056 case HLoadString::LoadKind::kBootImageAddress:
7057 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007058 case HLoadString::LoadKind::kJitTableAddress:
7059 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007060 fallback_load = false;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007061 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007062 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007063 fallback_load = false;
7064 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007065 }
7066 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007067 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007068 }
7069 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007070}
7071
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007072HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
7073 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007074 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007075 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007076 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7077 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007078 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007079 bool is_r6 = GetInstructionSetFeatures().IsR6();
7080 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007081 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007082 case HLoadClass::LoadKind::kInvalid:
7083 LOG(FATAL) << "UNREACHABLE";
7084 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007085 case HLoadClass::LoadKind::kReferrersClass:
7086 fallback_load = false;
7087 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007088 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007089 case HLoadClass::LoadKind::kBssEntry:
7090 DCHECK(!Runtime::Current()->UseJitCompilation());
7091 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007092 case HLoadClass::LoadKind::kBootImageAddress:
7093 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007094 case HLoadClass::LoadKind::kJitTableAddress:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007095 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007096 fallback_load = false;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007097 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007098 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007099 fallback_load = false;
7100 break;
7101 }
7102 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007103 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007104 }
7105 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007106}
7107
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007108Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
7109 Register temp) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007110 CHECK(!GetInstructionSetFeatures().IsR6());
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007111 CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
7112 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
7113 if (!invoke->GetLocations()->Intrinsified()) {
7114 return location.AsRegister<Register>();
7115 }
7116 // For intrinsics we allow any location, so it may be on the stack.
7117 if (!location.IsRegister()) {
7118 __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
7119 return temp;
7120 }
7121 // For register locations, check if the register was saved. If so, get it from the stack.
7122 // Note: There is a chance that the register was saved but not overwritten, so we could
7123 // save one load. However, since this is just an intrinsic slow path, we prefer this
7124 // simple and more robust approach rather than trying to determine if that's the case.
7125 SlowPathCode* slow_path = GetCurrentSlowPath();
7126 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
7127 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
7128 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
7129 __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
7130 return temp;
7131 }
7132 return location.AsRegister<Register>();
7133}
7134
Vladimir Markodc151b22015-10-15 18:02:30 +01007135HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
7136 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01007137 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007138 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007139 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007140 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007141 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7142 // with irreducible loops.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007143 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007144 bool is_r6 = GetInstructionSetFeatures().IsR6();
7145 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007146 switch (dispatch_info.method_load_kind) {
Vladimir Marko65979462017-05-19 17:25:12 +01007147 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007148 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007149 break;
Vladimir Markodc151b22015-10-15 18:02:30 +01007150 default:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007151 fallback_load = false;
Vladimir Markodc151b22015-10-15 18:02:30 +01007152 break;
7153 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007154 if (fallback_load) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007155 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007156 dispatch_info.method_load_data = 0;
7157 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007158 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01007159}
7160
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007161void CodeGeneratorMIPS::GenerateStaticOrDirectCall(
7162 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007163 // All registers are assumed to be correctly set up per the calling convention.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007164 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007165 HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
7166 HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007167 bool is_r6 = GetInstructionSetFeatures().IsR6();
Vladimir Marko65979462017-05-19 17:25:12 +01007168 Register base_reg = (invoke->HasPcRelativeMethodLoadKind() && !is_r6)
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007169 ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
7170 : ZERO;
7171
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007172 switch (method_load_kind) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007173 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007174 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007175 uint32_t offset =
7176 GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007177 __ LoadFromOffset(kLoadWord,
7178 temp.AsRegister<Register>(),
7179 TR,
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007180 offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007181 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007182 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007183 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00007184 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007185 break;
Vladimir Marko65979462017-05-19 17:25:12 +01007186 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
7187 DCHECK(GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007188 PcRelativePatchInfo* info_high = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
7189 PcRelativePatchInfo* info_low =
7190 NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
Vladimir Marko65979462017-05-19 17:25:12 +01007191 bool reordering = __ SetReorder(false);
7192 Register temp_reg = temp.AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007193 EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
Alexey Frunze6079dca2017-05-28 19:10:28 -07007194 __ Addiu(temp_reg, TMP, /* placeholder */ 0x5678);
Vladimir Marko65979462017-05-19 17:25:12 +01007195 __ SetReorder(reordering);
7196 break;
7197 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007198 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
7199 __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
7200 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007201 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007202 PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007203 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007204 PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
7205 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007206 Register temp_reg = temp.AsRegister<Register>();
7207 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007208 EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007209 __ Lw(temp_reg, TMP, /* placeholder */ 0x5678);
7210 __ SetReorder(reordering);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007211 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007212 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007213 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
7214 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
7215 return; // No code pointer retrieval; the runtime performs the call directly.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007216 }
7217 }
7218
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007219 switch (code_ptr_location) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007220 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007221 __ Bal(&frame_entry_label_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007222 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007223 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
7224 // T9 = callee_method->entry_point_from_quick_compiled_code_;
Goran Jakovljevic1a878372015-10-26 14:28:52 +01007225 __ LoadFromOffset(kLoadWord,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007226 T9,
7227 callee_method.AsRegister<Register>(),
7228 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07007229 kMipsPointerSize).Int32Value());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007230 // T9()
7231 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07007232 __ NopIfNoReordering();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007233 break;
7234 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007235 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
7236
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007237 DCHECK(!IsLeafMethod());
7238}
7239
7240void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007241 // Explicit clinit checks triggered by static invokes must have been pruned by
7242 // art::PrepareForRegisterAllocation.
7243 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007244
7245 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7246 return;
7247 }
7248
7249 LocationSummary* locations = invoke->GetLocations();
7250 codegen_->GenerateStaticOrDirectCall(invoke,
7251 locations->HasTemps()
7252 ? locations->GetTemp(0)
7253 : Location::NoLocation());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007254}
7255
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007256void CodeGeneratorMIPS::GenerateVirtualCall(
7257 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Goran Jakovljevice919b072016-10-04 10:17:34 +02007258 // Use the calling convention instead of the location of the receiver, as
7259 // intrinsics may have put the receiver in a different register. In the intrinsics
7260 // slow path, the arguments have been moved to the right place, so here we are
7261 // guaranteed that the receiver is the first register of the calling convention.
7262 InvokeDexCallingConvention calling_convention;
7263 Register receiver = calling_convention.GetRegisterAt(0);
7264
Chris Larsen3acee732015-11-18 13:31:08 -08007265 Register temp = temp_location.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007266 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7267 invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
7268 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07007269 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007270
7271 // temp = object->GetClass();
Goran Jakovljevice919b072016-10-04 10:17:34 +02007272 __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
Chris Larsen3acee732015-11-18 13:31:08 -08007273 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08007274 // Instead of simply (possibly) unpoisoning `temp` here, we should
7275 // emit a read barrier for the previous class reference load.
7276 // However this is not required in practice, as this is an
7277 // intermediate/temporary reference and because the current
7278 // concurrent copying collector keeps the from-space memory
7279 // concurrent copying collector may not do so in the future).
7280 // concurrent copying collector may not in the future).
7281 __ MaybeUnpoisonHeapReference(temp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007282 // temp = temp->GetMethodAt(method_offset);
7283 __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
7284 // T9 = temp->GetEntryPoint();
7285 __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
7286 // T9();
7287 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07007288 __ NopIfNoReordering();
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007289 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Chris Larsen3acee732015-11-18 13:31:08 -08007290}
7291
7292void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7293 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7294 return;
7295 }
7296
7297 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007298 DCHECK(!codegen_->IsLeafMethod());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007299}
7300
7301void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00007302 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007303 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007304 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007305 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
7306 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007307 return;
7308 }
Vladimir Marko41559982017-01-06 14:04:23 +00007309 DCHECK(!cls->NeedsAccessCheck());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007310 const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Alexey Frunze15958152017-02-09 19:08:30 -08007311 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
7312 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Alexey Frunze06a46c42016-07-19 15:00:40 -07007313 ? LocationSummary::kCallOnSlowPath
7314 : LocationSummary::kNoCall;
7315 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007316 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
7317 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
7318 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007319 switch (load_kind) {
7320 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007321 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007322 case HLoadClass::LoadKind::kBootImageAddress:
7323 case HLoadClass::LoadKind::kBssEntry:
Alexey Frunzec61c0762017-04-10 13:54:23 -07007324 if (isR6) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007325 break;
7326 }
7327 FALLTHROUGH_INTENDED;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007328 case HLoadClass::LoadKind::kReferrersClass:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007329 locations->SetInAt(0, Location::RequiresRegister());
7330 break;
7331 default:
7332 break;
7333 }
7334 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007335 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
7336 if (!kUseReadBarrier || kUseBakerReadBarrier) {
7337 // Rely on the type resolution or initialization and marking to save everything we need.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007338 // Request a temp to hold the BSS entry location for the slow path.
7339 locations->AddTemp(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007340 RegisterSet caller_saves = RegisterSet::Empty();
7341 InvokeRuntimeCallingConvention calling_convention;
7342 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7343 locations->SetCustomSlowPathCallerSaves(caller_saves);
7344 } else {
7345 // For non-Baker read barriers we have a temp-clobbering call.
7346 }
7347 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007348}
7349
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007350// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7351// move.
7352void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00007353 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007354 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00007355 codegen_->GenerateLoadClassRuntimeCall(cls);
Pavle Batutae87a7182015-10-28 13:10:42 +01007356 return;
7357 }
Vladimir Marko41559982017-01-06 14:04:23 +00007358 DCHECK(!cls->NeedsAccessCheck());
Pavle Batutae87a7182015-10-28 13:10:42 +01007359
Vladimir Marko41559982017-01-06 14:04:23 +00007360 LocationSummary* locations = cls->GetLocations();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007361 Location out_loc = locations->Out();
7362 Register out = out_loc.AsRegister<Register>();
7363 Register base_or_current_method_reg;
7364 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7365 switch (load_kind) {
7366 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007367 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007368 case HLoadClass::LoadKind::kBootImageAddress:
7369 case HLoadClass::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007370 base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
7371 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007372 case HLoadClass::LoadKind::kReferrersClass:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007373 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007374 base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
7375 break;
7376 default:
7377 base_or_current_method_reg = ZERO;
7378 break;
7379 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00007380
Alexey Frunze15958152017-02-09 19:08:30 -08007381 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
7382 ? kWithoutReadBarrier
7383 : kCompilerReadBarrierOption;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007384 bool generate_null_check = false;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007385 CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007386 switch (load_kind) {
7387 case HLoadClass::LoadKind::kReferrersClass: {
7388 DCHECK(!cls->CanCallRuntime());
7389 DCHECK(!cls->MustGenerateClinitCheck());
7390 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
7391 GenerateGcRootFieldLoad(cls,
7392 out_loc,
7393 base_or_current_method_reg,
Alexey Frunze15958152017-02-09 19:08:30 -08007394 ArtMethod::DeclaringClassOffset().Int32Value(),
7395 read_barrier_option);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007396 break;
7397 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007398 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007399 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze15958152017-02-09 19:08:30 -08007400 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007401 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Alexey Frunze06a46c42016-07-19 15:00:40 -07007402 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007403 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7404 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007405 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007406 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7407 out,
7408 base_or_current_method_reg,
7409 info_low);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007410 __ Addiu(out, out, /* placeholder */ 0x5678);
7411 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007412 break;
7413 }
7414 case HLoadClass::LoadKind::kBootImageAddress: {
Alexey Frunze15958152017-02-09 19:08:30 -08007415 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007416 uint32_t address = dchecked_integral_cast<uint32_t>(
7417 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
7418 DCHECK_NE(address, 0u);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007419 __ LoadLiteral(out,
7420 base_or_current_method_reg,
7421 codegen_->DeduplicateBootImageAddressLiteral(address));
7422 break;
7423 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007424 case HLoadClass::LoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007425 bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
7426 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7427 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007428 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007429 Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
7430 bool reordering = __ SetReorder(false);
7431 codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high,
7432 temp,
7433 base_or_current_method_reg,
7434 info_low);
7435 GenerateGcRootFieldLoad(cls, out_loc, temp, /* placeholder */ 0x5678, read_barrier_option);
7436 __ SetReorder(reordering);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007437 generate_null_check = true;
7438 break;
7439 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007440 case HLoadClass::LoadKind::kJitTableAddress: {
Alexey Frunze627c1a02017-01-30 19:28:14 -08007441 CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
7442 cls->GetTypeIndex(),
7443 cls->GetClass());
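      // The 0x1234 / 0x5678 immediates below are placeholders; they are patched later with
      // the high and low halves of the address of the class's GC root once it is known.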
7444 bool reordering = __ SetReorder(false);
7445 __ Bind(&info->high_label);
7446 __ Lui(out, /* placeholder */ 0x1234);
Alexey Frunze15958152017-02-09 19:08:30 -08007447 GenerateGcRootFieldLoad(cls, out_loc, out, /* placeholder */ 0x5678, read_barrier_option);
Alexey Frunze627c1a02017-01-30 19:28:14 -08007448 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007449 break;
7450 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007451 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007452 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00007453 LOG(FATAL) << "UNREACHABLE";
7454 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007455 }
7456
7457 if (generate_null_check || cls->MustGenerateClinitCheck()) {
7458 DCHECK(cls->CanCallRuntime());
7459 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007460 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007461 codegen_->AddSlowPath(slow_path);
7462 if (generate_null_check) {
7463 __ Beqz(out, slow_path->GetEntryLabel());
7464 }
7465 if (cls->MustGenerateClinitCheck()) {
7466 GenerateClassInitializationCheck(slow_path, out);
7467 } else {
7468 __ Bind(slow_path->GetExitLabel());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007469 }
7470 }
7471}
7472
7473static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07007474 return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007475}
7476
7477void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
7478 LocationSummary* locations =
7479 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
7480 locations->SetOut(Location::RequiresRegister());
7481}
7482
7483void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
7484 Register out = load->GetLocations()->Out().AsRegister<Register>();
7485 __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
7486}
7487
7488void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
7489 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
7490}
7491
7492void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
7493 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
7494}
7495
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007496void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08007497 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00007498 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007499 HLoadString::LoadKind load_kind = load->GetLoadKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07007500 const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007501 switch (load_kind) {
7502 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007503 case HLoadString::LoadKind::kBootImageAddress:
7504 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007505 case HLoadString::LoadKind::kBssEntry:
Alexey Frunzec61c0762017-04-10 13:54:23 -07007506 if (isR6) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007507 break;
7508 }
7509 FALLTHROUGH_INTENDED;
7510 // We need an extra register for PC-relative dex cache accesses.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007511 case HLoadString::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007512 locations->SetInAt(0, Location::RequiresRegister());
7513 break;
7514 default:
7515 break;
7516 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007517 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Alexey Frunzebb51df82016-11-01 16:07:32 -07007518 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007519 locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Alexey Frunzebb51df82016-11-01 16:07:32 -07007520 } else {
7521 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007522 if (load_kind == HLoadString::LoadKind::kBssEntry) {
7523 if (!kUseReadBarrier || kUseBakerReadBarrier) {
7524 // Rely on the pResolveString and marking to save everything we need.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007525 // Request a temp to hold the BSS entry location for the slow path.
7526 locations->AddTemp(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007527 RegisterSet caller_saves = RegisterSet::Empty();
7528 InvokeRuntimeCallingConvention calling_convention;
7529 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7530 locations->SetCustomSlowPathCallerSaves(caller_saves);
7531 } else {
7532 // For non-Baker read barriers we have a temp-clobbering call.
7533 }
7534 }
Alexey Frunzebb51df82016-11-01 16:07:32 -07007535 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007536}
7537
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007538// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7539// move.
7540void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007541 HLoadString::LoadKind load_kind = load->GetLoadKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007542 LocationSummary* locations = load->GetLocations();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007543 Location out_loc = locations->Out();
7544 Register out = out_loc.AsRegister<Register>();
7545 Register base_or_current_method_reg;
7546 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7547 switch (load_kind) {
7548 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007549 case HLoadString::LoadKind::kBootImageAddress:
7550 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007551 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007552 base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
7553 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007554 default:
7555 base_or_current_method_reg = ZERO;
7556 break;
7557 }
7558
7559 switch (load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007560 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markoaad75c62016-10-03 08:46:48 +00007561 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007562 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007563 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007564 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7565 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007566 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007567 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7568 out,
7569 base_or_current_method_reg,
7570 info_low);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007571 __ Addiu(out, out, /* placeholder */ 0x5678);
7572 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007573 return; // No dex cache slow path.
7574 }
7575 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007576 uint32_t address = dchecked_integral_cast<uint32_t>(
7577 reinterpret_cast<uintptr_t>(load->GetString().Get()));
7578 DCHECK_NE(address, 0u);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007579 __ LoadLiteral(out,
7580 base_or_current_method_reg,
7581 codegen_->DeduplicateBootImageAddressLiteral(address));
7582 return; // No dex cache slow path.
7583 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00007584 case HLoadString::LoadKind::kBssEntry: {
7585 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007586 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007587 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007588 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7589 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007590 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007591 Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
7592 bool reordering = __ SetReorder(false);
7593 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7594 temp,
7595 base_or_current_method_reg,
7596 info_low);
7597 GenerateGcRootFieldLoad(load,
7598 out_loc,
7599 temp,
7600 /* placeholder */ 0x5678,
7601 kCompilerReadBarrierOption);
7602 __ SetReorder(reordering);
7603 SlowPathCodeMIPS* slow_path =
7604 new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load, info_high);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007605 codegen_->AddSlowPath(slow_path);
7606 __ Beqz(out, slow_path->GetEntryLabel());
7607 __ Bind(slow_path->GetExitLabel());
7608 return;
7609 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08007610 case HLoadString::LoadKind::kJitTableAddress: {
7611 CodeGeneratorMIPS::JitPatchInfo* info =
7612 codegen_->NewJitRootStringPatch(load->GetDexFile(),
7613 load->GetStringIndex(),
7614 load->GetString());
7615 bool reordering = __ SetReorder(false);
7616 __ Bind(&info->high_label);
7617 __ Lui(out, /* placeholder */ 0x1234);
Alexey Frunze15958152017-02-09 19:08:30 -08007618 GenerateGcRootFieldLoad(load,
7619 out_loc,
7620 out,
7621 /* placeholder */ 0x5678,
7622 kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08007623 __ SetReorder(reordering);
7624 return;
7625 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007626 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007627 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007628 }
Nicolas Geoffray917d0162015-11-24 18:25:35 +00007629
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007630 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007631 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007632 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007633 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007634 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007635 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
7636 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007637}
7638
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007639void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
7640 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7641 locations->SetOut(Location::ConstantLocation(constant));
7642}
7643
7644void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
7645 // Will be generated at use site.
7646}
7647
7648void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7649 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007650 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007651 InvokeRuntimeCallingConvention calling_convention;
7652 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7653}
7654
7655void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7656 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01007657 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007658 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7659 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007660 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007661 }
7662 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7663}
7664
7665void LocationsBuilderMIPS::VisitMul(HMul* mul) {
7666 LocationSummary* locations =
7667 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
7668 switch (mul->GetResultType()) {
7669 case Primitive::kPrimInt:
7670 case Primitive::kPrimLong:
7671 locations->SetInAt(0, Location::RequiresRegister());
7672 locations->SetInAt(1, Location::RequiresRegister());
7673 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7674 break;
7675
7676 case Primitive::kPrimFloat:
7677 case Primitive::kPrimDouble:
7678 locations->SetInAt(0, Location::RequiresFpuRegister());
7679 locations->SetInAt(1, Location::RequiresFpuRegister());
7680 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
7681 break;
7682
7683 default:
7684 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
7685 }
7686}
7687
7688void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
7689 Primitive::Type type = instruction->GetType();
7690 LocationSummary* locations = instruction->GetLocations();
7691 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7692
7693 switch (type) {
7694 case Primitive::kPrimInt: {
7695 Register dst = locations->Out().AsRegister<Register>();
7696 Register lhs = locations->InAt(0).AsRegister<Register>();
7697 Register rhs = locations->InAt(1).AsRegister<Register>();
7698
7699 if (isR6) {
7700 __ MulR6(dst, lhs, rhs);
7701 } else {
7702 __ MulR2(dst, lhs, rhs);
7703 }
7704 break;
7705 }
7706 case Primitive::kPrimLong: {
7707 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
7708 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
7709 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
7710 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
7711 Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
7712 Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
7713
    7714       // Extra checks to protect against the case caused by the existence of A1_A2.
7715 // The algorithm is wrong if dst_high is either lhs_lo or rhs_lo:
7716 // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2).
7717 DCHECK_NE(dst_high, lhs_low);
7718 DCHECK_NE(dst_high, rhs_low);
7719
7720 // A_B * C_D
7721 // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
7722 // dst_lo: [ low(B*D) ]
    7723       // Note: R2 and R6 MUL produce the low 32 bits of the multiplication result.
7724
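      // Why this works: (A*2^32 + B) * (C*2^32 + D) = A*C*2^64 + (A*D + B*C)*2^32 + B*D.
      // The A*C term and the high halves of A*D and B*C lie beyond bit 63, so only the low
      // halves of the cross products and the full B*D contribute to the 64-bit result.
      // R6 reads the high half of the unsigned B*D product directly with MUHU; R2 goes
      // through the HI/LO register pair using MULTU/MFHI/MFLO.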
7725 if (isR6) {
7726 __ MulR6(TMP, lhs_high, rhs_low);
7727 __ MulR6(dst_high, lhs_low, rhs_high);
7728 __ Addu(dst_high, dst_high, TMP);
7729 __ MuhuR6(TMP, lhs_low, rhs_low);
7730 __ Addu(dst_high, dst_high, TMP);
7731 __ MulR6(dst_low, lhs_low, rhs_low);
7732 } else {
7733 __ MulR2(TMP, lhs_high, rhs_low);
7734 __ MulR2(dst_high, lhs_low, rhs_high);
7735 __ Addu(dst_high, dst_high, TMP);
7736 __ MultuR2(lhs_low, rhs_low);
7737 __ Mfhi(TMP);
7738 __ Addu(dst_high, dst_high, TMP);
7739 __ Mflo(dst_low);
7740 }
7741 break;
7742 }
7743 case Primitive::kPrimFloat:
7744 case Primitive::kPrimDouble: {
7745 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
7746 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
7747 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
7748 if (type == Primitive::kPrimFloat) {
7749 __ MulS(dst, lhs, rhs);
7750 } else {
7751 __ MulD(dst, lhs, rhs);
7752 }
7753 break;
7754 }
7755 default:
7756 LOG(FATAL) << "Unexpected mul type " << type;
7757 }
7758}
7759
7760void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
7761 LocationSummary* locations =
7762 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
7763 switch (neg->GetResultType()) {
7764 case Primitive::kPrimInt:
7765 case Primitive::kPrimLong:
7766 locations->SetInAt(0, Location::RequiresRegister());
7767 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7768 break;
7769
7770 case Primitive::kPrimFloat:
7771 case Primitive::kPrimDouble:
7772 locations->SetInAt(0, Location::RequiresFpuRegister());
7773 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
7774 break;
7775
7776 default:
7777 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
7778 }
7779}
7780
7781void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
7782 Primitive::Type type = instruction->GetType();
7783 LocationSummary* locations = instruction->GetLocations();
7784
7785 switch (type) {
7786 case Primitive::kPrimInt: {
7787 Register dst = locations->Out().AsRegister<Register>();
7788 Register src = locations->InAt(0).AsRegister<Register>();
7789 __ Subu(dst, ZERO, src);
7790 break;
7791 }
7792 case Primitive::kPrimLong: {
7793 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
7794 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
7795 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
7796 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
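      // 64-bit negation: negate the low word, then use Sltu to capture the borrow
      // (1 when the negated low word is non-zero) and subtract it from the negated high word.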
7797 __ Subu(dst_low, ZERO, src_low);
7798 __ Sltu(TMP, ZERO, dst_low);
7799 __ Subu(dst_high, ZERO, src_high);
7800 __ Subu(dst_high, dst_high, TMP);
7801 break;
7802 }
7803 case Primitive::kPrimFloat:
7804 case Primitive::kPrimDouble: {
7805 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
7806 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
7807 if (type == Primitive::kPrimFloat) {
7808 __ NegS(dst, src);
7809 } else {
7810 __ NegD(dst, src);
7811 }
7812 break;
7813 }
7814 default:
7815 LOG(FATAL) << "Unexpected neg type " << type;
7816 }
7817}
7818
7819void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
7820 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007821 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007822 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007823 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00007824 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7825 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007826}
7827
7828void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08007829 // Note: if heap poisoning is enabled, the entry point takes care
7830 // of poisoning the reference.
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00007831 codegen_->InvokeRuntime(kQuickAllocArrayResolved, instruction, instruction->GetDexPc());
7832 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007833}
7834
7835void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
7836 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007837 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007838 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00007839 if (instruction->IsStringAlloc()) {
7840 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
7841 } else {
7842 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00007843 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007844 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
7845}
7846
7847void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08007848 // Note: if heap poisoning is enabled, the entry point takes care
7849 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00007850 if (instruction->IsStringAlloc()) {
7851 // String is allocated through StringFactory. Call NewEmptyString entry point.
7852 Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07007853 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00007854 __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
7855 __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
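    // Position-independent MIPS callees expect their own entry address in T9, so the
    // indirect call goes through that register.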
7856 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07007857 __ NopIfNoReordering();
David Brazdil6de19382016-01-08 17:37:10 +00007858 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
7859 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007860 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00007861 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00007862 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007863}
7864
7865void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
7866 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7867 locations->SetInAt(0, Location::RequiresRegister());
7868 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7869}
7870
7871void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
7872 Primitive::Type type = instruction->GetType();
7873 LocationSummary* locations = instruction->GetLocations();
7874
7875 switch (type) {
7876 case Primitive::kPrimInt: {
7877 Register dst = locations->Out().AsRegister<Register>();
7878 Register src = locations->InAt(0).AsRegister<Register>();
7879 __ Nor(dst, src, ZERO);
7880 break;
7881 }
7882
7883 case Primitive::kPrimLong: {
7884 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
7885 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
7886 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
7887 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
7888 __ Nor(dst_high, src_high, ZERO);
7889 __ Nor(dst_low, src_low, ZERO);
7890 break;
7891 }
7892
7893 default:
7894 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
7895 }
7896}
7897
7898void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
7899 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7900 locations->SetInAt(0, Location::RequiresRegister());
7901 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7902}
7903
7904void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
7905 LocationSummary* locations = instruction->GetLocations();
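  // Booleans are materialized as 0 or 1, so XOR with 1 flips the value.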
7906 __ Xori(locations->Out().AsRegister<Register>(),
7907 locations->InAt(0).AsRegister<Register>(),
7908 1);
7909}
7910
7911void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01007912 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
7913 locations->SetInAt(0, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007914}
7915
Calin Juravle2ae48182016-03-16 14:05:09 +00007916void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
7917 if (CanMoveNullCheckToUser(instruction)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007918 return;
7919 }
7920 Location obj = instruction->GetLocations()->InAt(0);
7921
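  // Load into ZERO so the value is discarded; a null object faults here, and the PC
  // info recorded below lets the fault handler turn the signal into a NullPointerException.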
7922 __ Lw(ZERO, obj.AsRegister<Register>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00007923 RecordPcInfo(instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007924}
7925
Calin Juravle2ae48182016-03-16 14:05:09 +00007926void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007927 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00007928 AddSlowPath(slow_path);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007929
7930 Location obj = instruction->GetLocations()->InAt(0);
7931
7932 __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
7933}
7934
7935void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00007936 codegen_->GenerateNullCheck(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007937}
7938
7939void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
7940 HandleBinaryOp(instruction);
7941}
7942
7943void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
7944 HandleBinaryOp(instruction);
7945}
7946
7947void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
7948 LOG(FATAL) << "Unreachable";
7949}
7950
7951void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
7952 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
7953}
7954
7955void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
7956 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7957 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
7958 if (location.IsStackSlot()) {
7959 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
7960 } else if (location.IsDoubleStackSlot()) {
7961 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
7962 }
7963 locations->SetOut(location);
7964}
7965
7966void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
7967 ATTRIBUTE_UNUSED) {
7968 // Nothing to do, the parameter is already at its location.
7969}
7970
7971void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
7972 LocationSummary* locations =
7973 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
7974 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
7975}
7976
7977void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
7978 ATTRIBUTE_UNUSED) {
7979 // Nothing to do, the method is already at its location.
7980}
7981
7982void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
7983 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01007984 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007985 locations->SetInAt(i, Location::Any());
7986 }
7987 locations->SetOut(Location::Any());
7988}
7989
7990void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
7991 LOG(FATAL) << "Unreachable";
7992}
7993
7994void LocationsBuilderMIPS::VisitRem(HRem* rem) {
7995 Primitive::Type type = rem->GetResultType();
7996 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007997 (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007998 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
7999
8000 switch (type) {
8001 case Primitive::kPrimInt:
8002 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08008003 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008004 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8005 break;
8006
8007 case Primitive::kPrimLong: {
8008 InvokeRuntimeCallingConvention calling_convention;
8009 locations->SetInAt(0, Location::RegisterPairLocation(
8010 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8011 locations->SetInAt(1, Location::RegisterPairLocation(
8012 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
8013 locations->SetOut(calling_convention.GetReturnLocation(type));
8014 break;
8015 }
8016
8017 case Primitive::kPrimFloat:
8018 case Primitive::kPrimDouble: {
8019 InvokeRuntimeCallingConvention calling_convention;
8020 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8021 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
8022 locations->SetOut(calling_convention.GetReturnLocation(type));
8023 break;
8024 }
8025
8026 default:
8027 LOG(FATAL) << "Unexpected rem type " << type;
8028 }
8029}
8030
8031void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
8032 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008033
8034 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08008035 case Primitive::kPrimInt:
8036 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008037 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008038 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008039 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008040 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
8041 break;
8042 }
8043 case Primitive::kPrimFloat: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008044 codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008045 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008046 break;
8047 }
8048 case Primitive::kPrimDouble: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008049 codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008050 CheckEntrypointTypes<kQuickFmod, double, double, double>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008051 break;
8052 }
8053 default:
8054 LOG(FATAL) << "Unexpected rem type " << type;
8055 }
8056}
8057
Igor Murashkind01745e2017-04-05 16:40:31 -07008058void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
8059 constructor_fence->SetLocations(nullptr);
8060}
8061
8062void InstructionCodeGeneratorMIPS::VisitConstructorFence(
8063 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
8064 GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
8065}
8066
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008067void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
8068 memory_barrier->SetLocations(nullptr);
8069}
8070
8071void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
8072 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
8073}
8074
8075void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
8076 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
8077 Primitive::Type return_type = ret->InputAt(0)->GetType();
8078 locations->SetInAt(0, MipsReturnLocation(return_type));
8079}
8080
8081void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
8082 codegen_->GenerateFrameExit();
8083}
8084
8085void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
8086 ret->SetLocations(nullptr);
8087}
8088
8089void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
8090 codegen_->GenerateFrameExit();
8091}
8092
Alexey Frunze92d90602015-12-18 18:16:36 -08008093void LocationsBuilderMIPS::VisitRor(HRor* ror) {
8094 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00008095}
8096
Alexey Frunze92d90602015-12-18 18:16:36 -08008097void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
8098 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00008099}
8100
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008101void LocationsBuilderMIPS::VisitShl(HShl* shl) {
8102 HandleShift(shl);
8103}
8104
8105void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
8106 HandleShift(shl);
8107}
8108
8109void LocationsBuilderMIPS::VisitShr(HShr* shr) {
8110 HandleShift(shr);
8111}
8112
8113void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
8114 HandleShift(shr);
8115}
8116
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008117void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
8118 HandleBinaryOp(instruction);
8119}
8120
8121void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
8122 HandleBinaryOp(instruction);
8123}
8124
8125void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
8126 HandleFieldGet(instruction, instruction->GetFieldInfo());
8127}
8128
8129void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
8130 HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
8131}
8132
8133void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
8134 HandleFieldSet(instruction, instruction->GetFieldInfo());
8135}
8136
8137void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01008138 HandleFieldSet(instruction,
8139 instruction->GetFieldInfo(),
8140 instruction->GetDexPc(),
8141 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008142}
8143
8144void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
8145 HUnresolvedInstanceFieldGet* instruction) {
8146 FieldAccessCallingConventionMIPS calling_convention;
8147 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8148 instruction->GetFieldType(),
8149 calling_convention);
8150}
8151
8152void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
8153 HUnresolvedInstanceFieldGet* instruction) {
8154 FieldAccessCallingConventionMIPS calling_convention;
8155 codegen_->GenerateUnresolvedFieldAccess(instruction,
8156 instruction->GetFieldType(),
8157 instruction->GetFieldIndex(),
8158 instruction->GetDexPc(),
8159 calling_convention);
8160}
8161
8162void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
8163 HUnresolvedInstanceFieldSet* instruction) {
8164 FieldAccessCallingConventionMIPS calling_convention;
8165 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8166 instruction->GetFieldType(),
8167 calling_convention);
8168}
8169
8170void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
8171 HUnresolvedInstanceFieldSet* instruction) {
8172 FieldAccessCallingConventionMIPS calling_convention;
8173 codegen_->GenerateUnresolvedFieldAccess(instruction,
8174 instruction->GetFieldType(),
8175 instruction->GetFieldIndex(),
8176 instruction->GetDexPc(),
8177 calling_convention);
8178}
8179
8180void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
8181 HUnresolvedStaticFieldGet* instruction) {
8182 FieldAccessCallingConventionMIPS calling_convention;
8183 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8184 instruction->GetFieldType(),
8185 calling_convention);
8186}
8187
8188void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
8189 HUnresolvedStaticFieldGet* instruction) {
8190 FieldAccessCallingConventionMIPS calling_convention;
8191 codegen_->GenerateUnresolvedFieldAccess(instruction,
8192 instruction->GetFieldType(),
8193 instruction->GetFieldIndex(),
8194 instruction->GetDexPc(),
8195 calling_convention);
8196}
8197
8198void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
8199 HUnresolvedStaticFieldSet* instruction) {
8200 FieldAccessCallingConventionMIPS calling_convention;
8201 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8202 instruction->GetFieldType(),
8203 calling_convention);
8204}
8205
8206void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
8207 HUnresolvedStaticFieldSet* instruction) {
8208 FieldAccessCallingConventionMIPS calling_convention;
8209 codegen_->GenerateUnresolvedFieldAccess(instruction,
8210 instruction->GetFieldType(),
8211 instruction->GetFieldIndex(),
8212 instruction->GetDexPc(),
8213 calling_convention);
8214}
8215
8216void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01008217 LocationSummary* locations =
8218 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01008219 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008220}
8221
8222void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
8223 HBasicBlock* block = instruction->GetBlock();
8224 if (block->GetLoopInformation() != nullptr) {
8225 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
8226 // The back edge will generate the suspend check.
8227 return;
8228 }
8229 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
8230 // The goto will generate the suspend check.
8231 return;
8232 }
8233 GenerateSuspendCheck(instruction, nullptr);
8234}
8235
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008236void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
8237 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008238 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008239 InvokeRuntimeCallingConvention calling_convention;
8240 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
8241}
8242
8243void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
Serban Constantinescufca16662016-07-14 09:21:59 +01008244 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008245 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
8246}
8247
8248void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
8249 Primitive::Type input_type = conversion->GetInputType();
8250 Primitive::Type result_type = conversion->GetResultType();
8251 DCHECK_NE(input_type, result_type);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008252 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008253
8254 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
8255 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
8256 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
8257 }
8258
8259 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008260 if (!isR6 &&
8261 ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
8262 (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008263 call_kind = LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008264 }
8265
8266 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
8267
8268 if (call_kind == LocationSummary::kNoCall) {
8269 if (Primitive::IsFloatingPointType(input_type)) {
8270 locations->SetInAt(0, Location::RequiresFpuRegister());
8271 } else {
8272 locations->SetInAt(0, Location::RequiresRegister());
8273 }
8274
8275 if (Primitive::IsFloatingPointType(result_type)) {
8276 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8277 } else {
8278 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8279 }
8280 } else {
8281 InvokeRuntimeCallingConvention calling_convention;
8282
8283 if (Primitive::IsFloatingPointType(input_type)) {
8284 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8285 } else {
8286 DCHECK_EQ(input_type, Primitive::kPrimLong);
8287 locations->SetInAt(0, Location::RegisterPairLocation(
8288 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8289 }
8290
8291 locations->SetOut(calling_convention.GetReturnLocation(result_type));
8292 }
8293}
8294
8295void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
8296 LocationSummary* locations = conversion->GetLocations();
8297 Primitive::Type result_type = conversion->GetResultType();
8298 Primitive::Type input_type = conversion->GetInputType();
8299 bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008300 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008301
8302 DCHECK_NE(input_type, result_type);
8303
8304 if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
8305 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
8306 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
8307 Register src = locations->InAt(0).AsRegister<Register>();
8308
Alexey Frunzea871ef12016-06-27 15:20:11 -07008309 if (dst_low != src) {
8310 __ Move(dst_low, src);
8311 }
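    // Sign-extend into the high word by replicating the sign bit of the 32-bit source.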
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008312 __ Sra(dst_high, src, 31);
8313 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
8314 Register dst = locations->Out().AsRegister<Register>();
8315 Register src = (input_type == Primitive::kPrimLong)
8316 ? locations->InAt(0).AsRegisterPairLow<Register>()
8317 : locations->InAt(0).AsRegister<Register>();
8318
8319 switch (result_type) {
8320 case Primitive::kPrimChar:
8321 __ Andi(dst, src, 0xFFFF);
8322 break;
8323 case Primitive::kPrimByte:
8324 if (has_sign_extension) {
8325 __ Seb(dst, src);
8326 } else {
8327 __ Sll(dst, src, 24);
8328 __ Sra(dst, dst, 24);
8329 }
8330 break;
8331 case Primitive::kPrimShort:
8332 if (has_sign_extension) {
8333 __ Seh(dst, src);
8334 } else {
8335 __ Sll(dst, src, 16);
8336 __ Sra(dst, dst, 16);
8337 }
8338 break;
8339 case Primitive::kPrimInt:
Alexey Frunzea871ef12016-06-27 15:20:11 -07008340 if (dst != src) {
8341 __ Move(dst, src);
8342 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008343 break;
8344
8345 default:
8346 LOG(FATAL) << "Unexpected type conversion from " << input_type
8347 << " to " << result_type;
8348 }
8349 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008350 if (input_type == Primitive::kPrimLong) {
8351 if (isR6) {
8352 // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
8353 // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
8354 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
8355 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
8356 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8357 __ Mtc1(src_low, FTMP);
8358 __ Mthc1(src_high, FTMP);
8359 if (result_type == Primitive::kPrimFloat) {
8360 __ Cvtsl(dst, FTMP);
8361 } else {
8362 __ Cvtdl(dst, FTMP);
8363 }
8364 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01008365 QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
8366 : kQuickL2d;
8367 codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008368 if (result_type == Primitive::kPrimFloat) {
8369 CheckEntrypointTypes<kQuickL2f, float, int64_t>();
8370 } else {
8371 CheckEntrypointTypes<kQuickL2d, double, int64_t>();
8372 }
8373 }
8374 } else {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008375 Register src = locations->InAt(0).AsRegister<Register>();
8376 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8377 __ Mtc1(src, FTMP);
8378 if (result_type == Primitive::kPrimFloat) {
8379 __ Cvtsw(dst, FTMP);
8380 } else {
8381 __ Cvtdw(dst, FTMP);
8382 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008383 }
8384 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
8385 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
Lena Djokicf4e23a82017-05-09 15:43:45 +02008386
8387 // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
8388 // value of the output type if the input is outside of the range after the truncation or
8389 // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
8390 // results. This matches the desired float/double-to-int/long conversion exactly.
8391 //
8392 // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
8393 // value when the input is either a NaN or is outside of the range of the output type
8394 // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
8395 // the same result.
8396 //
8397 // The code takes care of the different behaviors by first comparing the input to the
    8398     // minimum output value (-2**63 for truncating to long, -2**31 for truncating to int).
    8399     // If the input is greater than or equal to the minimum, it proceeds to the truncate
8400 // instruction, which will handle such an input the same way irrespective of NAN2008.
8401 // Otherwise the input is compared to itself to determine whether it is a NaN or not
8402 // in order to return either zero or the minimum value.
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008403 if (result_type == Primitive::kPrimLong) {
8404 if (isR6) {
8405 // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
8406 // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
8407 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
8408 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
8409 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008410
8411 if (input_type == Primitive::kPrimFloat) {
8412 __ TruncLS(FTMP, src);
8413 } else {
8414 __ TruncLD(FTMP, src);
8415 }
8416 __ Mfc1(dst_low, FTMP);
8417 __ Mfhc1(dst_high, FTMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008418 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01008419 QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
8420 : kQuickD2l;
8421 codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008422 if (input_type == Primitive::kPrimFloat) {
8423 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
8424 } else {
8425 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
8426 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008427 }
8428 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008429 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
8430 Register dst = locations->Out().AsRegister<Register>();
8431 MipsLabel truncate;
8432 MipsLabel done;
8433
Lena Djokicf4e23a82017-05-09 15:43:45 +02008434 if (!isR6) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008435 if (input_type == Primitive::kPrimFloat) {
Lena Djokicf4e23a82017-05-09 15:43:45 +02008436 uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
8437 __ LoadConst32(TMP, min_val);
8438 __ Mtc1(TMP, FTMP);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008439 } else {
Lena Djokicf4e23a82017-05-09 15:43:45 +02008440 uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
8441 __ LoadConst32(TMP, High32Bits(min_val));
8442 __ Mtc1(ZERO, FTMP);
8443 __ MoveToFpuHigh(TMP, FTMP);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008444 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008445
8446 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008447 __ ColeS(0, FTMP, src);
8448 } else {
8449 __ ColeD(0, FTMP, src);
8450 }
8451 __ Bc1t(0, &truncate);
8452
8453 if (input_type == Primitive::kPrimFloat) {
8454 __ CeqS(0, src, src);
8455 } else {
8456 __ CeqD(0, src, src);
8457 }
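        // Reached only when the input is below the int range or is a NaN: preload INT32_MIN,
        // then Movf overwrites it with zero if the self-comparison above failed (NaN).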
8458 __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
8459 __ Movf(dst, ZERO, 0);
Lena Djokicf4e23a82017-05-09 15:43:45 +02008460
8461 __ B(&done);
8462
8463 __ Bind(&truncate);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008464 }
8465
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008466 if (input_type == Primitive::kPrimFloat) {
8467 __ TruncWS(FTMP, src);
8468 } else {
8469 __ TruncWD(FTMP, src);
8470 }
8471 __ Mfc1(dst, FTMP);
8472
Lena Djokicf4e23a82017-05-09 15:43:45 +02008473 if (!isR6) {
8474 __ Bind(&done);
8475 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008476 }
8477 } else if (Primitive::IsFloatingPointType(result_type) &&
8478 Primitive::IsFloatingPointType(input_type)) {
8479 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8480 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
8481 if (result_type == Primitive::kPrimFloat) {
8482 __ Cvtsd(dst, src);
8483 } else {
8484 __ Cvtds(dst, src);
8485 }
8486 } else {
8487 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
8488 << " to " << result_type;
8489 }
8490}
8491
8492void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
8493 HandleShift(ushr);
8494}
8495
8496void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
8497 HandleShift(ushr);
8498}
8499
8500void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
8501 HandleBinaryOp(instruction);
8502}
8503
8504void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
8505 HandleBinaryOp(instruction);
8506}
8507
8508void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
8509 // Nothing to do, this should be removed during prepare for register allocator.
8510 LOG(FATAL) << "Unreachable";
8511}
8512
8513void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
8514 // Nothing to do, this should be removed during prepare for register allocator.
8515 LOG(FATAL) << "Unreachable";
8516}
8517
8518void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008519 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008520}
8521
8522void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008523 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008524}
8525
8526void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008527 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008528}
8529
8530void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008531 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008532}
8533
8534void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008535 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008536}
8537
8538void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008539 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008540}
8541
8542void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008543 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008544}
8545
8546void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008547 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008548}
8549
8550void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008551 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008552}
8553
8554void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008555 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008556}
8557
8558void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008559 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008560}
8561
8562void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008563 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008564}
8565
8566void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008567 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008568}
8569
8570void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008571 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008572}
8573
8574void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008575 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008576}
8577
8578void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008579 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008580}
8581
8582void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008583 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008584}
8585
8586void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008587 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008588}
8589
8590void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008591 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008592}
8593
8594void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008595 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008596}
8597
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008598void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8599 LocationSummary* locations =
8600 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
8601 locations->SetInAt(0, Location::RequiresRegister());
8602}
8603
Alexey Frunze96b66822016-09-10 02:32:44 -07008604void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
8605 int32_t lower_bound,
8606 uint32_t num_entries,
8607 HBasicBlock* switch_block,
8608 HBasicBlock* default_block) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008609 // Create a set of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008610 Register temp_reg = TMP;
8611 __ Addiu32(temp_reg, value_reg, -lower_bound);
    8612   // Jump to the default block if the index is negative.
    8613   // Note: We don't check the case where the index is positive while value < lower_bound,
    8614   // because in that case index >= num_entries must be true, which lets us save one branch.
8615 __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));
8616
Alexey Frunze96b66822016-09-10 02:32:44 -07008617 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008618 // Jump to successors[0] if value == lower_bound.
8619 __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
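  // Each loop iteration consumes two case values: after rebiasing the value by -2, a
  // negative result selects successors[last_index + 1] and zero selects successors[last_index + 2].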
8620 int32_t last_index = 0;
8621 for (; num_entries - last_index > 2; last_index += 2) {
8622 __ Addiu(temp_reg, temp_reg, -2);
8623 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
8624 __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
8625 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
8626 __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
8627 }
8628 if (num_entries - last_index == 2) {
8629 // The last missing case_value.
8630 __ Addiu(temp_reg, temp_reg, -1);
8631 __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008632 }
8633
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008634 // And the default for any other value.
Alexey Frunze96b66822016-09-10 02:32:44 -07008635 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008636 __ B(codegen_->GetLabelOf(default_block));
8637 }
8638}
8639
Alexey Frunze96b66822016-09-10 02:32:44 -07008640void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
8641 Register constant_area,
8642 int32_t lower_bound,
8643 uint32_t num_entries,
8644 HBasicBlock* switch_block,
8645 HBasicBlock* default_block) {
8646 // Create a jump table.
8647 std::vector<MipsLabel*> labels(num_entries);
8648 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
8649 for (uint32_t i = 0; i < num_entries; i++) {
8650 labels[i] = codegen_->GetLabelOf(successors[i]);
8651 }
8652 JumpTable* table = __ CreateJumpTable(std::move(labels));
8653
8654 // Is the value in range?
8655 __ Addiu32(TMP, value_reg, -lower_bound);
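  // Sltiu only takes a 16-bit immediate, so larger entry counts are materialized into AT
  // and compared with Bgeu instead.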
8656 if (IsInt<16>(static_cast<int32_t>(num_entries))) {
8657 __ Sltiu(AT, TMP, num_entries);
8658 __ Beqz(AT, codegen_->GetLabelOf(default_block));
8659 } else {
8660 __ LoadConst32(AT, num_entries);
8661 __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
8662 }
8663
8664 // We are in the range of the table.
8665 // Load the target address from the jump table, indexing by the value.
8666 __ LoadLabelAddress(AT, constant_area, table->GetLabel());
Chris Larsencd0295d2017-03-31 15:26:54 -07008667 __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);
Alexey Frunze96b66822016-09-10 02:32:44 -07008668 __ Lw(TMP, TMP, 0);
8669 // Compute the absolute target address by adding the table start address
8670 // (the table contains offsets to targets relative to its start).
8671 __ Addu(TMP, TMP, AT);
8672 // And jump.
8673 __ Jr(TMP);
8674 __ NopIfNoReordering();
8675}
8676
8677void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8678 int32_t lower_bound = switch_instr->GetStartValue();
8679 uint32_t num_entries = switch_instr->GetNumEntries();
8680 LocationSummary* locations = switch_instr->GetLocations();
8681 Register value_reg = locations->InAt(0).AsRegister<Register>();
8682 HBasicBlock* switch_block = switch_instr->GetBlock();
8683 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8684
8685 if (codegen_->GetInstructionSetFeatures().IsR6() &&
8686 num_entries > kPackedSwitchJumpTableThreshold) {
8687 // R6 uses PC-relative addressing to access the jump table.
8688 // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
8689 // the jump table and it is implemented by changing HPackedSwitch to
8690 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
8691 // See VisitMipsPackedSwitch() for the table-based implementation on R2.
8692 GenTableBasedPackedSwitch(value_reg,
8693 ZERO,
8694 lower_bound,
8695 num_entries,
8696 switch_block,
8697 default_block);
8698 } else {
8699 GenPackedSwitchWithCompares(value_reg,
8700 lower_bound,
8701 num_entries,
8702 switch_block,
8703 default_block);
8704 }
8705}
8706
8707void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
8708 LocationSummary* locations =
8709 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
8710 locations->SetInAt(0, Location::RequiresRegister());
8711 // Constant area pointer (HMipsComputeBaseMethodAddress).
8712 locations->SetInAt(1, Location::RequiresRegister());
8713}
8714
8715void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
8716 int32_t lower_bound = switch_instr->GetStartValue();
8717 uint32_t num_entries = switch_instr->GetNumEntries();
8718 LocationSummary* locations = switch_instr->GetLocations();
8719 Register value_reg = locations->InAt(0).AsRegister<Register>();
8720 Register constant_area = locations->InAt(1).AsRegister<Register>();
8721 HBasicBlock* switch_block = switch_instr->GetBlock();
8722 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8723
8724 // This is an R2-only path. HPackedSwitch has been changed to
8725 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
8726 // required to address the jump table relative to PC.
8727 GenTableBasedPackedSwitch(value_reg,
8728 constant_area,
8729 lower_bound,
8730 num_entries,
8731 switch_block,
8732 default_block);
8733}
8734
Alexey Frunzee3fb2452016-05-10 16:08:05 -07008735void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
8736 HMipsComputeBaseMethodAddress* insn) {
8737 LocationSummary* locations =
8738 new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
8739 locations->SetOut(Location::RequiresRegister());
8740}
8741
8742void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
8743 HMipsComputeBaseMethodAddress* insn) {
8744 LocationSummary* locations = insn->GetLocations();
8745 Register reg = locations->Out().AsRegister<Register>();
8746
8747 CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
8748
8749 // Generate a dummy PC-relative call to obtain PC.
8750 __ Nal();
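  // NAL is a branch-and-link that never takes its branch; it only deposits the return
  // address (a known nearby PC) into RA.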
8751 // Grab the return address off RA.
8752 __ Move(reg, RA);
8753
8754 // Remember this offset (the obtained PC value) for later use with constant area.
8755 __ BindPcRelBaseLabel();
8756}
8757
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008758void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
8759 // The trampoline uses the same calling convention as dex calling conventions,
8760 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
8761 // the method_idx.
8762 HandleInvoke(invoke);
8763}
8764
8765void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
8766 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
8767}
8768
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008769void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
8770 LocationSummary* locations =
8771 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8772 locations->SetInAt(0, Location::RequiresRegister());
8773 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00008774}
8775
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008776void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
8777 LocationSummary* locations = instruction->GetLocations();
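  // Fetch the ArtMethod* either from the class's embedded vtable or, otherwise, by first
  // loading the class's IMT pointer and then indexing into the IMT.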
Vladimir Markoa1de9182016-02-25 11:37:38 +00008778 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008779 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008780 instruction->GetIndex(), kMipsPointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008781 __ LoadFromOffset(kLoadWord,
8782 locations->Out().AsRegister<Register>(),
8783 locations->InAt(0).AsRegister<Register>(),
8784 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008785 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008786 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00008787 instruction->GetIndex(), kMipsPointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008788 __ LoadFromOffset(kLoadWord,
8789 locations->Out().AsRegister<Register>(),
8790 locations->InAt(0).AsRegister<Register>(),
8791 mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008792 __ LoadFromOffset(kLoadWord,
8793 locations->Out().AsRegister<Register>(),
8794 locations->Out().AsRegister<Register>(),
8795 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008796 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00008797}
8798
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008799#undef __
8800#undef QUICK_ENTRY_POINT
8801
8802} // namespace mips
8803} // namespace art