/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "code_generator_mips.h"

#include "arch/mips/asm_support_mips.h"
#include "arch/mips/entrypoints_direct_mips.h"
#include "arch/mips/instruction_set_features_mips.h"
#include "art_method.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_mips.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips/assembler_mips.h"
#include "utils/stack_checks.h"
38
39namespace art {
40namespace mips {
41
42static constexpr int kCurrentMethodStackOffset = 0;
43static constexpr Register kMethodRegisterArgument = A0;
44
Alexey Frunze4147fcc2017-06-17 19:57:27 -070045// Flags controlling the use of thunks for Baker read barriers.
46constexpr bool kBakerReadBarrierThunksEnableForFields = true;
47constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
48constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
49
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020050Location MipsReturnLocation(Primitive::Type return_type) {
51 switch (return_type) {
52 case Primitive::kPrimBoolean:
53 case Primitive::kPrimByte:
54 case Primitive::kPrimChar:
55 case Primitive::kPrimShort:
56 case Primitive::kPrimInt:
57 case Primitive::kPrimNot:
58 return Location::RegisterLocation(V0);
59
60 case Primitive::kPrimLong:
61 return Location::RegisterPairLocation(V0, V1);
62
63 case Primitive::kPrimFloat:
64 case Primitive::kPrimDouble:
65 return Location::FpuRegisterLocation(F0);
66
67 case Primitive::kPrimVoid:
68 return Location();
69 }
70 UNREACHABLE();
71}
72
73Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
74 return MipsReturnLocation(type);
75}
76
77Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
78 return Location::RegisterLocation(kMethodRegisterArgument);
79}
80
81Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
82 Location next_location;
83
84 switch (type) {
85 case Primitive::kPrimBoolean:
86 case Primitive::kPrimByte:
87 case Primitive::kPrimChar:
88 case Primitive::kPrimShort:
89 case Primitive::kPrimInt:
90 case Primitive::kPrimNot: {
91 uint32_t gp_index = gp_index_++;
92 if (gp_index < calling_convention.GetNumberOfRegisters()) {
93 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
94 } else {
95 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
96 next_location = Location::StackSlot(stack_offset);
97 }
98 break;
99 }
100
101 case Primitive::kPrimLong: {
102 uint32_t gp_index = gp_index_;
103 gp_index_ += 2;
104 if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800105 Register reg = calling_convention.GetRegisterAt(gp_index);
106 if (reg == A1 || reg == A3) {
107 gp_index_++; // Skip A1(A3), and use A2_A3(T0_T1) instead.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200108 gp_index++;
109 }
110 Register low_even = calling_convention.GetRegisterAt(gp_index);
111 Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
112 DCHECK_EQ(low_even + 1, high_odd);
113 next_location = Location::RegisterPairLocation(low_even, high_odd);
114 } else {
115 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
116 next_location = Location::DoubleStackSlot(stack_offset);
117 }
118 break;
119 }
120
121 // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
122 // will take up the even/odd pair, while floats are stored in even regs only.
123 // On 64 bit FPU, both double and float are stored in even registers only.
124 case Primitive::kPrimFloat:
125 case Primitive::kPrimDouble: {
126 uint32_t float_index = float_index_++;
127 if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
128 next_location = Location::FpuRegisterLocation(
129 calling_convention.GetFpuRegisterAt(float_index));
130 } else {
131 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
132 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
133 : Location::StackSlot(stack_offset);
134 }
135 break;
136 }
137
138 case Primitive::kPrimVoid:
139 LOG(FATAL) << "Unexpected parameter type " << type;
140 break;
141 }
142
143 // Space on the stack is reserved for all arguments.
144 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
145
146 return next_location;
147}
148
149Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
150 return MipsReturnLocation(type);
151}
152
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200156
157class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
158 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000159 explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200160
161 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
162 LocationSummary* locations = instruction_->GetLocations();
163 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
164 __ Bind(GetEntryLabel());
165 if (instruction_->CanThrowIntoCatchBlock()) {
166 // Live registers will be restored in the catch block if caught.
167 SaveLiveRegisters(codegen, instruction_->GetLocations());
168 }
169 // We're moving two locations to locations that could overlap, so we need a parallel
170 // move resolver.
171 InvokeRuntimeCallingConvention calling_convention;
172 codegen->EmitParallelMoves(locations->InAt(0),
173 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
174 Primitive::kPrimInt,
175 locations->InAt(1),
176 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
177 Primitive::kPrimInt);
Serban Constantinescufca16662016-07-14 09:21:59 +0100178 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
179 ? kQuickThrowStringBounds
180 : kQuickThrowArrayBounds;
181 mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100182 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200183 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
184 }
185
186 bool IsFatal() const OVERRIDE { return true; }
187
188 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }
189
190 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200191 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
192};
193
194class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
195 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000196 explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200197
198 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
199 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
200 __ Bind(GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +0100201 mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200202 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
203 }
204
205 bool IsFatal() const OVERRIDE { return true; }
206
207 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }
208
209 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200210 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
211};
212
213class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
214 public:
215 LoadClassSlowPathMIPS(HLoadClass* cls,
216 HInstruction* at,
217 uint32_t dex_pc,
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700218 bool do_clinit,
219 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr)
220 : SlowPathCodeMIPS(at),
221 cls_(cls),
222 dex_pc_(dex_pc),
223 do_clinit_(do_clinit),
224 bss_info_high_(bss_info_high) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200225 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
226 }
227
228 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000229 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700230 Location out = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200231 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700232 const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700233 InvokeRuntimeCallingConvention calling_convention;
234 DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
235 const bool is_load_class_bss_entry =
236 (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200237 __ Bind(GetEntryLabel());
238 SaveLiveRegisters(codegen, locations);
239
Alexey Frunzec61c0762017-04-10 13:54:23 -0700240 // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
241 Register entry_address = kNoRegister;
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700242 if (is_load_class_bss_entry && baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700243 Register temp = locations->GetTemp(0).AsRegister<Register>();
244 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
245 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
246 // kSaveEverything call.
247 entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
248 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
249 if (temp_is_a0) {
250 __ Move(entry_address, temp);
251 }
252 }
253
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000254 dex::TypeIndex type_index = cls_->GetTypeIndex();
255 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
Serban Constantinescufca16662016-07-14 09:21:59 +0100256 QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
257 : kQuickInitializeType;
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000258 mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200259 if (do_clinit_) {
260 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
261 } else {
262 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
263 }
264
Alexey Frunzec61c0762017-04-10 13:54:23 -0700265 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700266 if (is_load_class_bss_entry && baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700267 // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700268 DCHECK(bss_info_high_);
269 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
270 mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
Alexey Frunzea663d9d2017-07-31 18:43:18 -0700271 __ Sw(calling_convention.GetRegisterAt(0),
272 entry_address,
273 /* placeholder */ 0x5678,
274 &info_low->label);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700275 }
276
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200277 // Move the class to the desired location.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200278 if (out.IsValid()) {
279 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000280 Primitive::Type type = instruction_->GetType();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700281 mips_codegen->MoveLocation(out,
282 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
283 type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200284 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200285 RestoreLiveRegisters(codegen, locations);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700286
287 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700288 if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
289 // For non-Baker read barriers we need to re-calculate the address of
Alexey Frunzec61c0762017-04-10 13:54:23 -0700290 // the class entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700291 const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000292 Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700293 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko1998cd02017-01-13 13:02:58 +0000294 mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700295 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
296 mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
Alexey Frunzea663d9d2017-07-31 18:43:18 -0700297 mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base);
298 __ Sw(out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678, &info_low->label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000299 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200300 __ B(GetExitLabel());
301 }
302
303 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }
304
305 private:
306 // The class this slow path will load.
307 HLoadClass* const cls_;
308
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200309 // The dex PC of `at_`.
310 const uint32_t dex_pc_;
311
312 // Whether to initialize the class.
313 const bool do_clinit_;
314
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700315 // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
316 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;
317
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200318 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
319};
320
321class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
322 public:
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700323 explicit LoadStringSlowPathMIPS(HLoadString* instruction,
324 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high)
325 : SlowPathCodeMIPS(instruction), bss_info_high_(bss_info_high) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200326
327 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700328 DCHECK(instruction_->IsLoadString());
329 DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200330 LocationSummary* locations = instruction_->GetLocations();
331 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Alexey Frunzec61c0762017-04-10 13:54:23 -0700332 HLoadString* load = instruction_->AsLoadString();
333 const dex::StringIndex string_index = load->GetStringIndex();
334 Register out = locations->Out().AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200335 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700336 const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700337 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200338 __ Bind(GetEntryLabel());
339 SaveLiveRegisters(codegen, locations);
340
Alexey Frunzec61c0762017-04-10 13:54:23 -0700341 // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
342 Register entry_address = kNoRegister;
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700343 if (baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700344 Register temp = locations->GetTemp(0).AsRegister<Register>();
345 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
346 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
347 // kSaveEverything call.
348 entry_address = temp_is_a0 ? out : temp;
349 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
350 if (temp_is_a0) {
351 __ Move(entry_address, temp);
352 }
353 }
354
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000355 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
Serban Constantinescufca16662016-07-14 09:21:59 +0100356 mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200357 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700358
359 // Store the resolved string to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700360 if (baker_or_no_read_barriers) {
Alexey Frunzec61c0762017-04-10 13:54:23 -0700361 // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700362 DCHECK(bss_info_high_);
363 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +0100364 mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, bss_info_high_);
Alexey Frunzea663d9d2017-07-31 18:43:18 -0700365 __ Sw(calling_convention.GetRegisterAt(0),
366 entry_address,
367 /* placeholder */ 0x5678,
368 &info_low->label);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700369 }
370
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200371 Primitive::Type type = instruction_->GetType();
372 mips_codegen->MoveLocation(locations->Out(),
Alexey Frunzec61c0762017-04-10 13:54:23 -0700373 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200374 type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200375 RestoreLiveRegisters(codegen, locations);
Vladimir Markoaad75c62016-10-03 08:46:48 +0000376
Alexey Frunzec61c0762017-04-10 13:54:23 -0700377 // Store the resolved string to the BSS entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700378 if (!baker_or_no_read_barriers) {
379 // For non-Baker read barriers we need to re-calculate the address of
Alexey Frunzec61c0762017-04-10 13:54:23 -0700380 // the string entry.
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700381 const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700382 Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700383 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +0100384 mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700385 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +0100386 mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
Alexey Frunzea663d9d2017-07-31 18:43:18 -0700387 mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base);
388 __ Sw(out, TMP, /* placeholder */ 0x5678, &info_low->label);
Alexey Frunzec61c0762017-04-10 13:54:23 -0700389 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200390 __ B(GetExitLabel());
391 }
392
393 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }
394
395 private:
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700396 // Pointer to the high half PC-relative patch info.
397 const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;
398
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200399 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
400};
401
402class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
403 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000404 explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200405
406 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
407 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
408 __ Bind(GetEntryLabel());
409 if (instruction_->CanThrowIntoCatchBlock()) {
410 // Live registers will be restored in the catch block if caught.
411 SaveLiveRegisters(codegen, instruction_->GetLocations());
412 }
Serban Constantinescufca16662016-07-14 09:21:59 +0100413 mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200414 instruction_,
415 instruction_->GetDexPc(),
Serban Constantinescufca16662016-07-14 09:21:59 +0100416 this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200417 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
418 }
419
420 bool IsFatal() const OVERRIDE { return true; }
421
422 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }
423
424 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200425 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
426};
427
428class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
429 public:
430 SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000431 : SlowPathCodeMIPS(instruction), successor_(successor) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200432
433 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Lena Djokicca8c2952017-05-29 11:31:46 +0200434 LocationSummary* locations = instruction_->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200435 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
436 __ Bind(GetEntryLabel());
Lena Djokicca8c2952017-05-29 11:31:46 +0200437 SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
Serban Constantinescufca16662016-07-14 09:21:59 +0100438 mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200439 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Lena Djokicca8c2952017-05-29 11:31:46 +0200440 RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200441 if (successor_ == nullptr) {
442 __ B(GetReturnLabel());
443 } else {
444 __ B(mips_codegen->GetLabelOf(successor_));
445 }
446 }
447
448 MipsLabel* GetReturnLabel() {
449 DCHECK(successor_ == nullptr);
450 return &return_label_;
451 }
452
453 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }
454
455 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200456 // If not null, the block to branch to after the suspend check.
457 HBasicBlock* const successor_;
458
459 // If `successor_` is null, the label to branch to after the suspend check.
460 MipsLabel return_label_;
461
462 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
463};
464
465class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
466 public:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800467 explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
468 : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200469
470 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
471 LocationSummary* locations = instruction_->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200472 uint32_t dex_pc = instruction_->GetDexPc();
473 DCHECK(instruction_->IsCheckCast()
474 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
475 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
476
477 __ Bind(GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800478 if (!is_fatal_) {
479 SaveLiveRegisters(codegen, locations);
480 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200481
482 // We're moving two locations to locations that could overlap, so we need a parallel
483 // move resolver.
484 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800485 codegen->EmitParallelMoves(locations->InAt(0),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200486 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
487 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800488 locations->InAt(1),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200489 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
490 Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200491 if (instruction_->IsInstanceOf()) {
Serban Constantinescufca16662016-07-14 09:21:59 +0100492 mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800493 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200494 Primitive::Type ret_type = instruction_->GetType();
495 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
496 mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200497 } else {
498 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800499 mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
500 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200501 }
502
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800503 if (!is_fatal_) {
504 RestoreLiveRegisters(codegen, locations);
505 __ B(GetExitLabel());
506 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200507 }
508
509 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }
510
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800511 bool IsFatal() const OVERRIDE { return is_fatal_; }
512
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200513 private:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800514 const bool is_fatal_;
515
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200516 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
517};
518
519class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
520 public:
Aart Bik42249c32016-01-07 15:33:50 -0800521 explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000522 : SlowPathCodeMIPS(instruction) {}
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200523
524 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800525 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200526 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100527 LocationSummary* locations = instruction_->GetLocations();
528 SaveLiveRegisters(codegen, locations);
529 InvokeRuntimeCallingConvention calling_convention;
530 __ LoadConst32(calling_convention.GetRegisterAt(0),
531 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufca16662016-07-14 09:21:59 +0100532 mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100533 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200534 }
535
536 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }
537
538 private:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200539 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
540};
541
Alexey Frunze15958152017-02-09 19:08:30 -0800542class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
543 public:
544 explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}
545
546 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
547 LocationSummary* locations = instruction_->GetLocations();
548 __ Bind(GetEntryLabel());
549 SaveLiveRegisters(codegen, locations);
550
551 InvokeRuntimeCallingConvention calling_convention;
552 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
553 parallel_move.AddMove(
554 locations->InAt(0),
555 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
556 Primitive::kPrimNot,
557 nullptr);
558 parallel_move.AddMove(
559 locations->InAt(1),
560 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
561 Primitive::kPrimInt,
562 nullptr);
563 parallel_move.AddMove(
564 locations->InAt(2),
565 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
566 Primitive::kPrimNot,
567 nullptr);
568 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
569
570 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
571 mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
572 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
573 RestoreLiveRegisters(codegen, locations);
574 __ B(GetExitLabel());
575 }
576
577 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }
578
579 private:
580 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
581};
582
// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
598class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
599 public:
600 ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
601 Location ref,
602 Location entrypoint = Location::NoLocation())
603 : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
604 DCHECK(kEmitCompilerReadBarrier);
605 }
606
607 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
608
609 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
610 LocationSummary* locations = instruction_->GetLocations();
611 Register ref_reg = ref_.AsRegister<Register>();
612 DCHECK(locations->CanCall());
613 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
614 DCHECK(instruction_->IsInstanceFieldGet() ||
615 instruction_->IsStaticFieldGet() ||
616 instruction_->IsArrayGet() ||
617 instruction_->IsArraySet() ||
618 instruction_->IsLoadClass() ||
619 instruction_->IsLoadString() ||
620 instruction_->IsInstanceOf() ||
621 instruction_->IsCheckCast() ||
622 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
623 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
624 << "Unexpected instruction in read barrier marking slow path: "
625 << instruction_->DebugName();
626
627 __ Bind(GetEntryLabel());
628 // No need to save live registers; it's taken care of by the
629 // entrypoint. Also, there is no need to update the stack mask,
630 // as this runtime call will not trigger a garbage collection.
631 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
632 DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
633 (S2 <= ref_reg && ref_reg <= S7) ||
634 (ref_reg == FP)) << ref_reg;
635 // "Compact" slow path, saving two moves.
636 //
637 // Instead of using the standard runtime calling convention (input
638 // and output in A0 and V0 respectively):
639 //
640 // A0 <- ref
641 // V0 <- ReadBarrierMark(A0)
642 // ref <- V0
643 //
644 // we just use rX (the register containing `ref`) as input and output
645 // of a dedicated entrypoint:
646 //
647 // rX <- ReadBarrierMarkRegX(rX)
648 //
649 if (entrypoint_.IsValid()) {
650 mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
651 DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
652 __ Jalr(entrypoint_.AsRegister<Register>());
653 __ NopIfNoReordering();
654 } else {
655 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100656 Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800657 // This runtime call does not require a stack map.
658 mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
659 instruction_,
660 this,
661 /* direct */ false);
662 }
663 __ B(GetExitLabel());
664 }
665
666 private:
667 // The location (register) of the marked object reference.
668 const Location ref_;
669
670 // The location of the entrypoint if already loaded.
671 const Location entrypoint_;
672
673 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
674};
675
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
                                            Location ref,
                                            Register obj,
                                            Location field_offset,
                                            Register temp1)
      : SlowPathCodeMIPS(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    // This slow path is only ever emitted when read barriers are compiled in.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
  }

  // Emits: (1) the in-place marking call (same compact convention as
  // ReadBarrierMarkSlowPathMIPS), then (2) a CAS that swings
  // `*(obj_ + field_offset_)` from the old reference to the marked one.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegisterPair()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                      instruction_,
                                                      this,
                                                      /* direct */ false);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    MipsLabel done;
    __ Beq(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    Register base = obj_;
    // The UnsafeCASObject intrinsic uses a register pair as field
    // offset ("long offset"), of which only the low part contains
    // data.
    Register offset = field_offset_.AsRegisterPairLow<Register>();
    Register expected = temp1_;
    Register value = ref_reg;
    Register tmp_ptr = TMP;  // Pointer to actual memory.
    Register tmp = AT;  // Value in memory.

    __ Addu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    MipsLabel loop_head, exit_loop;
    __ Bind(&loop_head);
    if (is_r6) {
      __ LlR6(tmp, tmp_ptr);
    } else {
      __ LlR2(tmp, tmp_ptr);
    }
    __ Bne(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    if (is_r6) {
      __ ScR6(tmp, tmp_ptr);
    } else {
      __ ScR2(tmp, tmp_ptr);
    }
    // SC writes 1 to `tmp` on success, 0 on failure; retry on failure.
    __ Beqz(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Temporary register preserving the old reference across the entrypoint call.
  const Register temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
};
842
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
                                          Location out,
                                          Location ref,
                                          Location obj,
                                          uint32_t offset,
                                          Location index)
      : SlowPathCodeMIPS(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  // Emits a call to the kQuickReadBarrierSlow entrypoint with
  // (ref, obj, offset-or-index) marshalled into the runtime calling
  // convention, then moves the returned reference into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips::MipsAssembler::Sll and
          // art::mips::MipsAssembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: the constant `offset_` is materialized directly into the
      // third argument register after the parallel move.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`
  // that is neither callee-save nor blocked. Used to preserve a callee-save
  // index register before it is clobbered (see EmitNativeCode above).
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location where the read-barrier result is written back.
  const Location out_;
  // The location of the reference that was loaded and must be processed.
  const Location ref_;
  // The location of the object holding the reference.
  const Location obj_;
  // Static field/array-data offset; 0 when `index_` carries the offset.
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
};
1030
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    // This slow path is only ever emitted when read barriers are compiled in.
    DCHECK(kEmitCompilerReadBarrier);
  }

  // Emits a call to kQuickReadBarrierForRootSlow with the root as argument
  // and moves the returned reference into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               root_,
                               Primitive::kPrimNot);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  // The location where the read-barrier result is written back.
  const Location out_;
  // The location of the GC root to pass to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};
1077
// Constructs the MIPS32 code generator: registers the core/FPU callee-save
// masks with the base CodeGenerator, sets up the location builder, instruction
// visitor, move resolver and assembler, and initializes the arena-backed patch
// tables used for PC-relative method/type/string references, .bss entries and
// JIT string/class roots.
CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
                                     const MipsInstructionSetFeatures& isa_features,
                                     const CompilerOptions& compiler_options,
                                     OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfFRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      clobbered_ra_(false) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1112
1113#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001114// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1115#define __ down_cast<MipsAssembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001116#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001117
// Finalizes code generation: fixes up branches in the assembler, then
// re-maps every previously recorded native pc (stack maps, disassembly
// intervals) through the assembler's adjusted positions, since branch
// fix-up can move code (the DCHECK shows positions may only grow).
void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  // Delegate the rest (e.g. copying code to the allocator) to the base class.
  CodeGenerator::Finalize(allocator);
}
1148
// Returns the code generator's assembler, used by the move resolver's `__` macro.
MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
  return codegen_->GetAssembler();
}
1152
// Emits code for the move at `index` by delegating to the code generator's
// generic location-to-location move.
void ParallelMoveResolverMIPS::EmitMove(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}
1158
// Emits code swapping the contents of the two locations of the move at
// `index`. Dispatches on the location kinds (GPR, FPR, register pair,
// stack slot, double stack slot) and uses TMP/AT/FTMP as scratch.
// Constants are never swapped; identical locations are a no-op.
void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  Primitive::Type type = move->GetType();
  Location loc1 = move->GetDestination();
  Location loc2 = move->GetSource();

  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  if (loc1.IsRegister() && loc2.IsRegister()) {
    // Swap 2 GPRs.
    Register r1 = loc1.AsRegister<Register>();
    Register r2 = loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
    // Swap 2 FPRs, using single- or double-precision moves per the type.
    FRegister f1 = loc1.AsFpuRegister<FRegister>();
    FRegister f2 = loc2.AsFpuRegister<FRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, f2);
      __ MovS(f2, f1);
      __ MovS(f1, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, f2);
      __ MovD(f2, f1);
      __ MovD(f1, FTMP);
    }
  } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegister())) {
    // Swap FPR and GPR.
    DCHECK_EQ(type, Primitive::kPrimFloat);  // Can only swap a float.
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Mfc1(r2, f1);
    __ Mtc1(TMP, f1);
  } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
    // Swap 2 GPR register pairs, one half at a time.
    Register r1 = loc1.AsRegisterPairLow<Register>();
    Register r2 = loc2.AsRegisterPairLow<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
    r1 = loc1.AsRegisterPairHigh<Register>();
    r2 = loc2.AsRegisterPairHigh<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
    // Swap FPR and GPR register pair.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                          : loc2.AsRegisterPairLow<Register>();
    Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                          : loc2.AsRegisterPairHigh<Register>();
    // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
    // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
    // unpredictable and the following mfch1 will fail.
    __ Mfc1(TMP, f1);
    __ MoveFromFpuHigh(AT, f1);
    __ Mtc1(r2_l, f1);
    __ MoveToFpuHigh(r2_h, f1);
    __ Move(r2_l, TMP);
    __ Move(r2_h, AT);
  } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
  } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
  } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
             (loc1.IsStackSlot() && loc2.IsRegister())) {
    // Swap GPR and single stack slot through TMP.
    Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    __ Move(TMP, reg);
    __ LoadFromOffset(kLoadWord, reg, SP, offset);
    __ StoreToOffset(kStoreWord, TMP, SP, offset);
  } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
             (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
    // Swap GPR register pair and double stack slot, one word at a time.
    Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                           : loc2.AsRegisterPairLow<Register>();
    Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                           : loc2.AsRegisterPairHigh<Register>();
    intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
                                                 : loc2.GetHighStackIndex(kMipsWordSize);
    __ Move(TMP, reg_l);
    __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
    __ Move(TMP, reg_h);
    __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
  } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
    // Remaining case: FPR and stack slot (single or double), through FTMP.
    FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                         : loc2.AsFpuRegister<FRegister>();
    intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, reg);
      __ LoadSFromOffset(reg, SP, offset);
      __ StoreSToOffset(FTMP, SP, offset);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, reg);
      __ LoadDFromOffset(reg, SP, offset);
      __ StoreDToOffset(FTMP, SP, offset);
    }
  } else {
    LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
  }
}
1278
// Reloads a spilled scratch register from the stack (counterpart of SpillScratch).
void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
1282
// Spills a scratch register onto the stack so it can be borrowed during moves.
void ParallelMoveResolverMIPS::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
1286
// Swaps two stack slots (`index1`/`index2`, SP-relative byte offsets), one
// word at a time; swaps two consecutive words when `double_slot` is true.
void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
  for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
    __ LoadFromOffset(kLoadWord,
                      Register(ensure_scratch.GetRegister()),
                      SP,
                      index1 + stack_offset);
    __ LoadFromOffset(kLoadWord,
                      TMP,
                      SP,
                      index2 + stack_offset);
    __ StoreToOffset(kStoreWord,
                     Register(ensure_scratch.GetRegister()),
                     SP,
                     index2 + stack_offset);
    __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
  }
}
1310
Alexey Frunze73296a72016-06-03 22:51:46 -07001311void CodeGeneratorMIPS::ComputeSpillMask() {
1312 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
1313 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
1314 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
1315 // If there're FPU callee-saved registers and there's an odd number of GPR callee-saved
1316 // registers, include the ZERO register to force alignment of FPU callee-saved registers
1317 // within the stack frame.
1318 if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
1319 core_spill_mask_ |= (1 << ZERO);
1320 }
Alexey Frunze58320ce2016-08-30 21:40:46 -07001321}
1322
1323bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
Alexey Frunze06a46c42016-07-19 15:00:40 -07001324 // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
Alexey Frunze58320ce2016-08-30 21:40:46 -07001325 // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
1326 // into the path that creates a stack frame so that RA can be explicitly saved and restored.
1327 // RA can't otherwise be saved/restored when it's the only spilled register.
Alexey Frunze58320ce2016-08-30 21:40:46 -07001328 return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
Alexey Frunze73296a72016-06-03 22:51:46 -07001329}
1330
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001331static dwarf::Reg DWARFReg(Register reg) {
1332 return dwarf::Reg::MipsCore(static_cast<int>(reg));
1333}
1334
1335// TODO: mapping of floating-point registers to DWARF.
1336
// Emits the method prologue: an optional implicit stack-overflow probe, frame
// allocation, callee-save spills (with CFI records for GPRs), and stores of
// the current ArtMethod* and the should-deoptimize flag when required.
void CodeGeneratorMIPS::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  // Leaf methods with small-enough frames can skip the overflow probe.
  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();

  if (do_overflow_check) {
    // Touch the lowest address this method may use; a fault here is reported
    // at the PC recorded below (implicit stack-overflow check).
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    // No frame to build: only RA may be in the spill mask and it must be intact.
    CHECK_EQ(fpu_spill_mask_, 0u);
    CHECK_EQ(core_spill_mask_, 1u << RA);
    CHECK(!clobbered_ra_);
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core registers are spilled from the top of the frame downwards, highest
  // register number first.
  for (uint32_t mask = core_spill_mask_; mask != 0; ) {
    Register reg = static_cast<Register>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsWordSize;
    // The ZERO register is only included for alignment.
    if (reg != ZERO) {
      __ StoreToOffset(kStoreWord, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves go below the core saves, one doubleword each.
  for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
    FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsDoublewordSize;
    __ StoreDToOffset(reg, SP, ofs);
    // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1398
// Emits the method epilogue: callee-save restores (mirroring GenerateFrameEntry),
// frame deallocation and the return jump, keeping CFI state consistent.
void CodeGeneratorMIPS::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (uint32_t mask = core_spill_mask_; mask != 0; ) {
      Register reg = static_cast<Register>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsWordSize;
      // The ZERO register is only included for alignment.
      if (reg != ZERO) {
        __ LoadFromOffset(kLoadWord, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
      FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsDoublewordSize;
      __ LoadDFromOffset(reg, SP, ofs);
      // TODO: __ cfi().Restore(DWARFReg(reg));
    }

    size_t frame_size = GetFrameSize();
    // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
    bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
    // Assembler reordering is disabled so the delay slot is filled explicitly.
    bool reordering = __ SetReorder(false);
    if (exchange) {
      __ Jr(RA);
      __ DecreaseFrameSize(frame_size);  // Single instruction in delay slot.
    } else {
      // Frame size doesn't fit a single addiu: deallocate first, then return
      // with a NOP in the delay slot.
      __ DecreaseFrameSize(frame_size);
      __ Jr(RA);
      __ Nop();  // In delay slot.
    }
    __ SetReorder(reordering);
  } else {
    // No frame was built; just return.
    __ Jr(RA);
    __ NopIfNoReordering();
  }

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1447
1448void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1449 __ Bind(GetLabelOf(block));
1450}
1451
Lena Djokicca8c2952017-05-29 11:31:46 +02001452VectorRegister VectorRegisterFrom(Location location) {
1453 DCHECK(location.IsFpuRegister());
1454 return static_cast<VectorRegister>(location.AsFpuRegister<FRegister>());
1455}
1456
Lena Djokic8098da92017-06-28 12:07:50 +02001457void CodeGeneratorMIPS::MoveLocation(Location destination,
1458 Location source,
1459 Primitive::Type dst_type) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001460 if (source.Equals(destination)) {
1461 return;
1462 }
1463
Lena Djokic8098da92017-06-28 12:07:50 +02001464 if (source.IsConstant()) {
1465 MoveConstant(destination, source.GetConstant());
1466 } else {
1467 if (destination.IsRegister()) {
1468 if (source.IsRegister()) {
1469 __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
1470 } else if (source.IsFpuRegister()) {
1471 __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
1472 } else {
1473 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001474 __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
Lena Djokic8098da92017-06-28 12:07:50 +02001475 }
1476 } else if (destination.IsRegisterPair()) {
1477 if (source.IsRegisterPair()) {
1478 __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
1479 __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
1480 } else if (source.IsFpuRegister()) {
1481 Register dst_high = destination.AsRegisterPairHigh<Register>();
1482 Register dst_low = destination.AsRegisterPairLow<Register>();
1483 FRegister src = source.AsFpuRegister<FRegister>();
1484 __ Mfc1(dst_low, src);
1485 __ MoveFromFpuHigh(dst_high, src);
1486 } else {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07001487 DCHECK(source.IsDoubleStackSlot())
1488 << "Cannot move from " << source << " to " << destination;
Lena Djokic8098da92017-06-28 12:07:50 +02001489 int32_t off = source.GetStackIndex();
1490 Register r = destination.AsRegisterPairLow<Register>();
1491 __ LoadFromOffset(kLoadDoubleword, r, SP, off);
1492 }
1493 } else if (destination.IsFpuRegister()) {
1494 if (source.IsRegister()) {
1495 DCHECK(!Primitive::Is64BitType(dst_type));
1496 __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
1497 } else if (source.IsRegisterPair()) {
1498 DCHECK(Primitive::Is64BitType(dst_type));
1499 FRegister dst = destination.AsFpuRegister<FRegister>();
1500 Register src_high = source.AsRegisterPairHigh<Register>();
1501 Register src_low = source.AsRegisterPairLow<Register>();
1502 __ Mtc1(src_low, dst);
1503 __ MoveToFpuHigh(src_high, dst);
1504 } else if (source.IsFpuRegister()) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001505 if (GetGraph()->HasSIMD()) {
1506 __ MoveV(VectorRegisterFrom(destination),
1507 VectorRegisterFrom(source));
Lena Djokic8098da92017-06-28 12:07:50 +02001508 } else {
Lena Djokicca8c2952017-05-29 11:31:46 +02001509 if (Primitive::Is64BitType(dst_type)) {
1510 __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
1511 } else {
1512 DCHECK_EQ(dst_type, Primitive::kPrimFloat);
1513 __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
1514 }
Lena Djokic8098da92017-06-28 12:07:50 +02001515 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001516 } else if (source.IsSIMDStackSlot()) {
1517 __ LoadQFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
Lena Djokic8098da92017-06-28 12:07:50 +02001518 } else if (source.IsDoubleStackSlot()) {
1519 DCHECK(Primitive::Is64BitType(dst_type));
1520 __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
1521 } else {
1522 DCHECK(!Primitive::Is64BitType(dst_type));
1523 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
1524 __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
1525 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001526 } else if (destination.IsSIMDStackSlot()) {
1527 if (source.IsFpuRegister()) {
1528 __ StoreQToOffset(source.AsFpuRegister<FRegister>(), SP, destination.GetStackIndex());
1529 } else {
1530 DCHECK(source.IsSIMDStackSlot());
1531 __ LoadQFromOffset(FTMP, SP, source.GetStackIndex());
1532 __ StoreQToOffset(FTMP, SP, destination.GetStackIndex());
1533 }
Lena Djokic8098da92017-06-28 12:07:50 +02001534 } else if (destination.IsDoubleStackSlot()) {
1535 int32_t dst_offset = destination.GetStackIndex();
1536 if (source.IsRegisterPair()) {
1537 __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, dst_offset);
1538 } else if (source.IsFpuRegister()) {
1539 __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
1540 } else {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07001541 DCHECK(source.IsDoubleStackSlot())
1542 << "Cannot move from " << source << " to " << destination;
Lena Djokic8098da92017-06-28 12:07:50 +02001543 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1544 __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
1545 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
1546 __ StoreToOffset(kStoreWord, TMP, SP, dst_offset + 4);
1547 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001548 } else {
Lena Djokic8098da92017-06-28 12:07:50 +02001549 DCHECK(destination.IsStackSlot()) << destination;
1550 int32_t dst_offset = destination.GetStackIndex();
1551 if (source.IsRegister()) {
1552 __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, dst_offset);
1553 } else if (source.IsFpuRegister()) {
1554 __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
1555 } else {
1556 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
1557 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1558 __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
1559 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001560 }
1561 }
1562}
1563
1564void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
1565 if (c->IsIntConstant() || c->IsNullConstant()) {
1566 // Move 32 bit constant.
1567 int32_t value = GetInt32ValueOf(c);
1568 if (destination.IsRegister()) {
1569 Register dst = destination.AsRegister<Register>();
1570 __ LoadConst32(dst, value);
1571 } else {
1572 DCHECK(destination.IsStackSlot())
1573 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001574 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001575 }
1576 } else if (c->IsLongConstant()) {
1577 // Move 64 bit constant.
1578 int64_t value = GetInt64ValueOf(c);
1579 if (destination.IsRegisterPair()) {
1580 Register r_h = destination.AsRegisterPairHigh<Register>();
1581 Register r_l = destination.AsRegisterPairLow<Register>();
1582 __ LoadConst64(r_h, r_l, value);
1583 } else {
1584 DCHECK(destination.IsDoubleStackSlot())
1585 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001586 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001587 }
1588 } else if (c->IsFloatConstant()) {
1589 // Move 32 bit float constant.
1590 int32_t value = GetInt32ValueOf(c);
1591 if (destination.IsFpuRegister()) {
1592 __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
1593 } else {
1594 DCHECK(destination.IsStackSlot())
1595 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001596 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001597 }
1598 } else {
1599 // Move 64 bit double constant.
1600 DCHECK(c->IsDoubleConstant()) << c->DebugName();
1601 int64_t value = GetInt64ValueOf(c);
1602 if (destination.IsFpuRegister()) {
1603 FRegister fd = destination.AsFpuRegister<FRegister>();
1604 __ LoadDConst64(fd, value, TMP);
1605 } else {
1606 DCHECK(destination.IsDoubleStackSlot())
1607 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001608 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001609 }
1610 }
1611}
1612
1613void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1614 DCHECK(destination.IsRegister());
1615 Register dst = destination.AsRegister<Register>();
1616 __ LoadConst32(dst, value);
1617}
1618
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001619void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
1620 if (location.IsRegister()) {
1621 locations->AddTemp(location);
Alexey Frunzec9e94f32015-10-26 16:11:39 -07001622 } else if (location.IsRegisterPair()) {
1623 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1624 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001625 } else {
1626 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1627 }
1628}
1629
Vladimir Markoaad75c62016-10-03 08:46:48 +00001630template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1631inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
1632 const ArenaDeque<PcRelativePatchInfo>& infos,
1633 ArenaVector<LinkerPatch>* linker_patches) {
1634 for (const PcRelativePatchInfo& info : infos) {
1635 const DexFile& dex_file = info.target_dex_file;
1636 size_t offset_or_index = info.offset_or_index;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001637 DCHECK(info.label.IsBound());
1638 uint32_t literal_offset = __ GetLabelLocation(&info.label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001639 // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
1640 // the assembler's base label used for PC-relative addressing.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001641 const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
1642 uint32_t pc_rel_offset = info_high.pc_rel_label.IsBound()
1643 ? __ GetLabelLocation(&info_high.pc_rel_label)
Vladimir Markoaad75c62016-10-03 08:46:48 +00001644 : __ GetPcRelBaseLabelLocation();
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001645 linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00001646 }
1647}
1648
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001649void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
1650 DCHECK(linker_patches->empty());
1651 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01001652 pc_relative_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001653 method_bss_entry_patches_.size() +
Alexey Frunze06a46c42016-07-19 15:00:40 -07001654 pc_relative_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01001655 type_bss_entry_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001656 pc_relative_string_patches_.size() +
1657 string_bss_entry_patches_.size();
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001658 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01001659 if (GetCompilerOptions().IsBootImage()) {
1660 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
Vladimir Markoaad75c62016-10-03 08:46:48 +00001661 linker_patches);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001662 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
1663 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001664 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
1665 linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01001666 } else {
1667 DCHECK(pc_relative_method_patches_.empty());
Vladimir Marko94ec2db2017-09-06 17:21:03 +01001668 EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(pc_relative_type_patches_,
1669 linker_patches);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001670 EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
1671 linker_patches);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001672 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001673 EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
1674 linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001675 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
1676 linker_patches);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001677 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
1678 linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001679 DCHECK_EQ(size, linker_patches->size());
Alexey Frunze06a46c42016-07-19 15:00:40 -07001680}
1681
Vladimir Marko65979462017-05-19 17:25:12 +01001682CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001683 MethodReference target_method,
1684 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001685 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001686 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001687 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001688 &pc_relative_method_patches_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001689}
1690
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001691CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001692 MethodReference target_method,
1693 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001694 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001695 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001696 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001697 &method_bss_entry_patches_);
1698}
1699
Alexey Frunze06a46c42016-07-19 15:00:40 -07001700CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001701 const DexFile& dex_file,
1702 dex::TypeIndex type_index,
1703 const PcRelativePatchInfo* info_high) {
1704 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001705}
1706
Vladimir Marko1998cd02017-01-13 13:02:58 +00001707CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001708 const DexFile& dex_file,
1709 dex::TypeIndex type_index,
1710 const PcRelativePatchInfo* info_high) {
1711 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001712}
1713
Vladimir Marko65979462017-05-19 17:25:12 +01001714CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001715 const DexFile& dex_file,
1716 dex::StringIndex string_index,
1717 const PcRelativePatchInfo* info_high) {
1718 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001719}
1720
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001721CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewStringBssEntryPatch(
1722 const DexFile& dex_file,
1723 dex::StringIndex string_index,
1724 const PcRelativePatchInfo* info_high) {
1725 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
1726}
1727
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001728CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001729 const DexFile& dex_file,
1730 uint32_t offset_or_index,
1731 const PcRelativePatchInfo* info_high,
1732 ArenaDeque<PcRelativePatchInfo>* patches) {
1733 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001734 return &patches->back();
1735}
1736
Alexey Frunze06a46c42016-07-19 15:00:40 -07001737Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1738 return map->GetOrCreate(
1739 value,
1740 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1741}
1742
Alexey Frunze06a46c42016-07-19 15:00:40 -07001743Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001744 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001745}
1746
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001747void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001748 Register out,
Alexey Frunzea663d9d2017-07-31 18:43:18 -07001749 Register base) {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001750 DCHECK(!info_high->patch_info_high);
Alexey Frunze6079dca2017-05-28 19:10:28 -07001751 DCHECK_NE(out, base);
Alexey Frunzea663d9d2017-07-31 18:43:18 -07001752 bool reordering = __ SetReorder(false);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001753 if (GetInstructionSetFeatures().IsR6()) {
1754 DCHECK_EQ(base, ZERO);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001755 __ Bind(&info_high->label);
1756 __ Bind(&info_high->pc_rel_label);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001757 // Add the high half of a 32-bit offset to PC.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001758 __ Auipc(out, /* placeholder */ 0x1234);
Alexey Frunzea663d9d2017-07-31 18:43:18 -07001759 __ SetReorder(reordering);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001760 } else {
1761 // If base is ZERO, emit NAL to obtain the actual base.
1762 if (base == ZERO) {
1763 // Generate a dummy PC-relative call to obtain PC.
1764 __ Nal();
1765 }
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001766 __ Bind(&info_high->label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001767 __ Lui(out, /* placeholder */ 0x1234);
1768 // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
1769 // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
1770 if (base == ZERO) {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001771 __ Bind(&info_high->pc_rel_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001772 }
Alexey Frunzea663d9d2017-07-31 18:43:18 -07001773 __ SetReorder(reordering);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001774 // Add the high half of a 32-bit offset to PC.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001775 __ Addu(out, out, (base == ZERO) ? RA : base);
1776 }
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001777 // A following instruction will add the sign-extended low half of the 32-bit
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001778 // offset to `out` (e.g. lw, jialc, addiu).
Vladimir Markoaad75c62016-10-03 08:46:48 +00001779}
1780
Alexey Frunze627c1a02017-01-30 19:28:14 -08001781CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
1782 const DexFile& dex_file,
1783 dex::StringIndex dex_index,
1784 Handle<mirror::String> handle) {
1785 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
1786 reinterpret_cast64<uint64_t>(handle.GetReference()));
1787 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
1788 return &jit_string_patches_.back();
1789}
1790
1791CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
1792 const DexFile& dex_file,
1793 dex::TypeIndex dex_index,
1794 Handle<mirror::Class> handle) {
1795 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
1796 reinterpret_cast64<uint64_t>(handle.GetReference()));
1797 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
1798 return &jit_class_patches_.back();
1799}
1800
// Patches the two-instruction sequence ("lui reg, high" followed by an
// instruction adding/using the low 16 bits) that loads a JIT root, replacing
// the 0x12345678 placeholder with the root's actual address in the root table.
void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
                                        const uint8_t* roots_data,
                                        const CodeGeneratorMIPS::JitPatchInfo& info,
                                        uint64_t index_in_table) const {
  uint32_t high_literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
  uint32_t low_literal_offset = GetAssembler().GetLabelLocation(&info.low_label);
  // Address of the root's slot in the JIT root table.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
  // Verify the placeholder bytes (little-endian encoding of the 0x1234 /
  // 0x5678 immediates emitted at code-generation time).
  // lui reg, addr32_high
  DCHECK_EQ(code[high_literal_offset + 0], 0x34);
  DCHECK_EQ(code[high_literal_offset + 1], 0x12);
  DCHECK_EQ((code[high_literal_offset + 2] & 0xE0), 0x00);
  DCHECK_EQ(code[high_literal_offset + 3], 0x3C);
  // instr reg, reg, addr32_low
  DCHECK_EQ(code[low_literal_offset + 0], 0x78);
  DCHECK_EQ(code[low_literal_offset + 1], 0x56);
  addr32 += (addr32 & 0x8000) << 1;  // Account for sign extension in "instr reg, reg, addr32_low".
  // Rewrite the immediates in place.
  // lui reg, addr32_high
  code[high_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
  code[high_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
  // instr reg, reg, addr32_low
  code[low_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 0);
  code[low_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 8);
}
1826
1827void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1828 for (const JitPatchInfo& info : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001829 const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
1830 dex::StringIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001831 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001832 uint64_t index_in_table = it->second;
1833 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001834 }
1835 for (const JitPatchInfo& info : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001836 const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
1837 dex::TypeIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001838 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001839 uint64_t index_in_table = it->second;
1840 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001841 }
1842}
1843
Goran Jakovljevice114da22016-12-26 14:21:43 +01001844void CodeGeneratorMIPS::MarkGCCard(Register object,
1845 Register value,
1846 bool value_can_be_null) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001847 MipsLabel done;
1848 Register card = AT;
1849 Register temp = TMP;
Goran Jakovljevice114da22016-12-26 14:21:43 +01001850 if (value_can_be_null) {
1851 __ Beqz(value, &done);
1852 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001853 __ LoadFromOffset(kLoadWord,
1854 card,
1855 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -07001856 Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001857 __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
1858 __ Addu(temp, card, temp);
1859 __ Sb(card, temp, 0);
Goran Jakovljevice114da22016-12-26 14:21:43 +01001860 if (value_can_be_null) {
1861 __ Bind(&done);
1862 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001863}
1864
David Brazdil58282f42016-01-14 12:45:10 +00001865void CodeGeneratorMIPS::SetupBlockedRegisters() const {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001866 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1867 blocked_core_registers_[ZERO] = true;
1868 blocked_core_registers_[K0] = true;
1869 blocked_core_registers_[K1] = true;
1870 blocked_core_registers_[GP] = true;
1871 blocked_core_registers_[SP] = true;
1872 blocked_core_registers_[RA] = true;
1873
1874 // AT and TMP(T8) are used as temporary/scratch registers
1875 // (similar to how AT is used by MIPS assemblers).
1876 blocked_core_registers_[AT] = true;
1877 blocked_core_registers_[TMP] = true;
1878 blocked_fpu_registers_[FTMP] = true;
1879
1880 // Reserve suspend and thread registers.
1881 blocked_core_registers_[S0] = true;
1882 blocked_core_registers_[TR] = true;
1883
1884 // Reserve T9 for function calls
1885 blocked_core_registers_[T9] = true;
1886
1887 // Reserve odd-numbered FPU registers.
1888 for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
1889 blocked_fpu_registers_[i] = true;
1890 }
1891
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02001892 if (GetGraph()->IsDebuggable()) {
1893 // Stubs do not save callee-save floating point registers. If the graph
1894 // is debuggable, we need to deal with these registers differently. For
1895 // now, just block them.
1896 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1897 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1898 }
1899 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001900}
1901
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001902size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1903 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1904 return kMipsWordSize;
1905}
1906
// Reloads core register `reg_id` from the stack slot at `stack_index`
// (relative to SP) and returns the number of bytes consumed (one MIPS word).
size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
  return kMipsWordSize;
}
1911
// Spills FPU register `reg_id` to the stack. When the graph contains SIMD
// code the full vector register (Q) is stored, otherwise only the 64-bit
// double (D). Returns the spill slot size actually used.
size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ StoreQToOffset(FRegister(reg_id), SP, stack_index);
  } else {
    __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
  }
  return GetFloatingPointSpillSlotSize();
}
1920
// Reloads FPU register `reg_id` from the stack; mirrors
// SaveFloatingPointRegister (Q load with SIMD, D load otherwise).
size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ LoadQFromOffset(FRegister(reg_id), SP, stack_index);
  } else {
    __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
  }
  return GetFloatingPointSpillSlotSize();
}
1929
// Prints the symbolic name of core register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}
1933
// Prints the symbolic name of FPU register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FRegister(reg);
}
1937
Serban Constantinescufca16662016-07-14 09:21:59 +01001938constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1939
// Emits a call to the given quick runtime entrypoint and, when the
// entrypoint requires one, records a stack map at `dex_pc` for
// `instruction` (with `slow_path` context, if any).
void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                      HInstruction* instruction,
                                      uint32_t dex_pc,
                                      SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
                        IsDirectEntrypoint(entrypoint));
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1951
// Emits a runtime call from an explicit entrypoint offset without recording
// a stack map; `direct` selects the direct-entrypoint calling shim
// (see GenerateInvokeRuntime()).
void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                            HInstruction* instruction,
                                                            SlowPathCode* slow_path,
                                                            bool direct) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset, direct);
}
1959
// Loads the entrypoint address from the thread register and calls it via T9.
// Assembler reordering is disabled so this code controls the JALR delay slot
// explicitly: for direct entrypoints the frame-size increase sits in the
// delay slot; otherwise a NOP fills it.
void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
  bool reordering = __ SetReorder(false);
  __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
  __ Jalr(T9);
  if (direct) {
    // Reserve argument space on stack (for $a0-$a3) for
    // entrypoints that directly reference native implementations.
    // Called function may use this space to store $a0-$a3 regs.
    __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);  // Single instruction in delay slot.
    __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
  } else {
    __ Nop();  // In delay slot.
  }
  __ SetReorder(reordering);
}
1975
// Emits a class initialization check: branches to `slow_path` unless the
// status word of the class in `class_reg` is at least kStatusInitialized.
// Clobbers TMP (loaded status) and AT (status threshold).
void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
                                                                    Register class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Blt(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1985
// Emits a memory barrier. The requested barrier kind is ignored: SYNC with
// stype 0 is emitted unconditionally.
void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}
1989
// Emits a suspend check: loads the current thread's flags (16-bit,
// zero-extended) and takes the slow path when any flag is set. With a null
// `successor` execution falls through after the slow path's return label;
// otherwise control transfers to `successor` when no suspension is pending.
void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                        HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnez(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqz(TMP, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
2009
// Constructor: caches the codegen's assembler alongside the codegen itself
// so the emission helpers (via the __ macro) can reach both directly.
InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
                                                           CodeGeneratorMIPS* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
2015
2016void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
2017 DCHECK_EQ(instruction->InputCount(), 2U);
2018 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2019 Primitive::Type type = instruction->GetResultType();
2020 switch (type) {
2021 case Primitive::kPrimInt: {
2022 locations->SetInAt(0, Location::RequiresRegister());
2023 HInstruction* right = instruction->InputAt(1);
2024 bool can_use_imm = false;
2025 if (right->IsConstant()) {
2026 int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
2027 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
2028 can_use_imm = IsUint<16>(imm);
2029 } else if (instruction->IsAdd()) {
2030 can_use_imm = IsInt<16>(imm);
2031 } else {
2032 DCHECK(instruction->IsSub());
2033 can_use_imm = IsInt<16>(-imm);
2034 }
2035 }
2036 if (can_use_imm)
2037 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
2038 else
2039 locations->SetInAt(1, Location::RequiresRegister());
2040 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2041 break;
2042 }
2043
2044 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002045 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002046 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2047 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002048 break;
2049 }
2050
2051 case Primitive::kPrimFloat:
2052 case Primitive::kPrimDouble:
2053 DCHECK(instruction->IsAdd() || instruction->IsSub());
2054 locations->SetInAt(0, Location::RequiresFpuRegister());
2055 locations->SetInAt(1, Location::RequiresFpuRegister());
2056 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2057 break;
2058
2059 default:
2060 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
2061 }
2062}
2063
// Emits code for a two-input arithmetic/logical operation (and, or, xor,
// add, sub) using the locations chosen by LocationsBuilderMIPS::HandleBinaryOp.
// 32-bit values live in single core registers, 64-bit values in register
// pairs (low/high), and float/double values in FPU registers. TMP and AT are
// used as scratch registers in the 64-bit paths.
void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Location rhs_location = locations->InAt(1);

      Register rhs_reg = ZERO;
      int32_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<Register>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (use_imm)
          __ Addiu(dst, lhs, rhs_imm);
        else
          __ Addu(dst, lhs, rhs_reg);
      } else {
        DCHECK(instruction->IsSub());
        // Subtraction of an immediate is emitted as addition of its negation.
        if (use_imm)
          __ Addiu(dst, lhs, -rhs_imm);
        else
          __ Subu(dst, lhs, rhs_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit operands occupy register pairs; results are computed one
      // 32-bit half at a time with an explicit carry/borrow in TMP or AT.
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      if (!use_imm) {
        Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
        Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
        if (instruction->IsAnd()) {
          __ And(dst_low, lhs_low, rhs_low);
          __ And(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsOr()) {
          __ Or(dst_low, lhs_low, rhs_low);
          __ Or(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsXor()) {
          __ Xor(dst_low, lhs_low, rhs_low);
          __ Xor(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsAdd()) {
          // TMP receives the carry out of the low-word addition.
          if (lhs_low == rhs_low) {
            // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
            __ Slt(TMP, lhs_low, ZERO);
            __ Addu(dst_low, lhs_low, rhs_low);
          } else {
            __ Addu(dst_low, lhs_low, rhs_low);
            // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
            __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
          }
          __ Addu(dst_high, lhs_high, rhs_high);
          __ Addu(dst_high, dst_high, TMP);
        } else {
          DCHECK(instruction->IsSub());
          // TMP receives the borrow out of the low-word subtraction.
          __ Sltu(TMP, lhs_low, rhs_low);
          __ Subu(dst_low, lhs_low, rhs_low);
          __ Subu(dst_high, lhs_high, rhs_high);
          __ Subu(dst_high, dst_high, TMP);
        }
      } else {
        // Constant right-hand side: operate on each 32-bit half, using
        // 16-bit immediate forms where the half fits.
        int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
        if (instruction->IsOr()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Ori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Or(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Ori(dst_high, lhs_high, high);
            }
          } else {
            // When high == low, TMP already holds the value: both halves
            // fail the 16-bit check together, so the low step loaded TMP.
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Or(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsXor()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Xori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Xor(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Xori(dst_high, lhs_high, high);
            }
          } else {
            // See the OR case: TMP still holds `low` when high == low.
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Xor(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsAnd()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          // AND with an all-ones half is a no-op, so only a move (if needed)
          // is emitted for 0xFFFFFFFF halves.
          if (IsUint<16>(low)) {
            __ Andi(dst_low, lhs_low, low);
          } else if (low != 0xFFFFFFFF) {
            __ LoadConst32(TMP, low);
            __ And(dst_low, lhs_low, TMP);
          } else if (dst_low != lhs_low) {
            __ Move(dst_low, lhs_low);
          }
          if (IsUint<16>(high)) {
            __ Andi(dst_high, lhs_high, high);
          } else if (high != 0xFFFFFFFF) {
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ And(dst_high, lhs_high, TMP);
          } else if (dst_high != lhs_high) {
            __ Move(dst_high, lhs_high);
          }
        } else {
          // Add/sub: subtraction is addition of the negated constant.
          if (instruction->IsSub()) {
            value = -value;
          } else {
            DCHECK(instruction->IsAdd());
          }
          int32_t low = Low32Bits(value);
          int32_t high = High32Bits(value);
          // AT receives the carry out of the low-word addition; a zero low
          // half can produce no carry, so both the compare and the final
          // carry add are skipped in that case.
          if (IsInt<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Addiu(dst_low, lhs_low, low);
            }
            if (low != 0) {
              __ Sltiu(AT, dst_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Addu(dst_low, lhs_low, TMP);
            __ Sltu(AT, dst_low, TMP);
          }
          if (IsInt<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Addiu(dst_high, lhs_high, high);
            }
          } else {
            // TMP still holds `low` when high == low (see the OR case).
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Addu(dst_high, lhs_high, TMP);
          }
          if (low != 0) {
            __ Addu(dst_high, dst_high, AT);
          }
        }
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Only add/sub reach this helper for floating point types
      // (see LocationsBuilderMIPS::HandleBinaryOp).
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat) {
          __ AddS(dst, lhs, rhs);
        } else {
          __ AddD(dst, lhs, rhs);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimFloat) {
          __ SubS(dst, lhs, rhs);
        } else {
          __ SubD(dst, lhs, rhs);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2280
2281void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002282 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002283
2284 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2285 Primitive::Type type = instr->GetResultType();
2286 switch (type) {
2287 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002288 locations->SetInAt(0, Location::RequiresRegister());
2289 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2290 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2291 break;
2292 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002293 locations->SetInAt(0, Location::RequiresRegister());
2294 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2295 locations->SetOut(Location::RequiresRegister());
2296 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002297 default:
2298 LOG(FATAL) << "Unexpected shift type " << type;
2299 }
2300}
2301
2302static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2303
// Emits code for shl/shr/ushr/ror on int and long. The shift distance is
// masked to the type's width (31 or 63). 64-bit values live in register
// pairs; constant distances select between three code shapes (zero shift,
// shift < 32, shift >= 32), while variable distances compute both halves
// and patch up when bit 5 of the distance is set. TMP and AT are scratch.
void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  Location rhs_location = locations->InAt(1);
  bool use_imm = rhs_location.IsConstant();
  Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
  int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
  const uint32_t shift_mask =
      (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
  const uint32_t shift_value = rhs_imm & shift_mask;
  // Are the INS (Insert Bit Field) and ROTR instructions supported?
  bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: at most a register move.
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (instr->IsShl()) {
          __ Sll(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Sra(dst, lhs, shift_value);
        } else if (instr->IsUShr()) {
          __ Srl(dst, lhs, shift_value);
        } else {
          // Rotate: single ROTR on R2+, otherwise synthesized as
          // (lhs << (32 - n)) | (lhs >> n).
          if (has_ins_rotr) {
            __ Rotr(dst, lhs, shift_value);
          } else {
            __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
            __ Srl(dst, lhs, shift_value);
            __ Or(dst, dst, TMP);
          }
        }
      } else {
        if (instr->IsShl()) {
          __ Sllv(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Srav(dst, lhs, rhs_reg);
        } else if (instr->IsUShr()) {
          __ Srlv(dst, lhs, rhs_reg);
        } else {
          if (has_ins_rotr) {
            __ Rotrv(dst, lhs, rhs_reg);
          } else {
            __ Subu(TMP, ZERO, rhs_reg);
            // 32-bit shift instructions use the 5 least significant bits of the shift count, so
            // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
            // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
            // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
            // IOW, the OR'd values are equal.
            __ Sllv(TMP, lhs, TMP);
            __ Srlv(dst, lhs, rhs_reg);
            __ Or(dst, dst, TMP);
          }
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: plain 64-bit move.
          codegen_->MoveLocation(locations->Out(), locations->InAt(0), type);
        } else if (shift_value < kMipsBitsPerWord) {
          // Distance in (0, 32): each result half combines bits from both
          // input halves; INS merges them in one instruction on R2+.
          if (has_ins_rotr) {
            if (instr->IsShl()) {
              __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
              __ Sll(dst_low, lhs_low, shift_value);
            } else if (instr->IsShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Sra(dst_high, lhs_high, shift_value);
            } else if (instr->IsUShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
            } else {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
              __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
            }
          } else {
            if (instr->IsShl()) {
              __ Sll(dst_low, lhs_low, shift_value);
              __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
              __ Sll(dst_high, lhs_high, shift_value);
              __ Or(dst_high, dst_high, TMP);
            } else if (instr->IsShr()) {
              __ Sra(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else if (instr->IsUShr()) {
              __ Srl(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else {
              __ Srl(TMP, lhs_low, shift_value);
              __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
              __ Or(dst_low, dst_low, TMP);
              __ Srl(TMP, lhs_high, shift_value);
              __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Or(dst_high, dst_high, TMP);
            }
          }
        } else {
          // Distance in [32, 64): each result half comes entirely from the
          // opposite input half, shifted by (distance - 32).
          const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
          if (instr->IsShl()) {
            __ Sll(dst_high, lhs_low, shift_value_high);
            __ Move(dst_low, ZERO);
          } else if (instr->IsShr()) {
            __ Sra(dst_low, lhs_high, shift_value_high);
            __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
          } else if (instr->IsUShr()) {
            __ Srl(dst_low, lhs_high, shift_value_high);
            __ Move(dst_high, ZERO);
          } else {
            if (shift_value == kMipsBitsPerWord) {
              // 64-bit rotation by 32 is just a swap.
              __ Move(dst_low, lhs_high);
              __ Move(dst_high, lhs_low);
            } else {
              if (has_ins_rotr) {
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
              } else {
                __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Or(dst_low, dst_low, TMP);
                __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Or(dst_high, dst_high, TMP);
              }
            }
          }
        }
      } else {
        // Variable distance: compute both halves as if the distance were in
        // [0, 32), then swap/zero/sign-fill the halves when bit 5 of the
        // distance (tested via Andi with kMipsBitsPerWord) is set.
        MipsLabel done;
        if (instr->IsShl()) {
          __ Sllv(dst_low, lhs_low, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Srl(TMP, lhs_low, 1);
          __ Srlv(TMP, TMP, AT);
          __ Sllv(dst_high, lhs_high, rhs_reg);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, ZERO);
        } else if (instr->IsShr()) {
          __ Srav(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Sra(dst_high, dst_high, 31);
        } else if (instr->IsUShr()) {
          __ Srlv(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Move(dst_high, ZERO);
        } else {
          __ Nor(AT, ZERO, rhs_reg);
          __ Srlv(TMP, lhs_low, rhs_reg);
          __ Sll(dst_low, lhs_high, 1);
          __ Sllv(dst_low, dst_low, AT);
          __ Or(dst_low, dst_low, TMP);
          __ Srlv(TMP, lhs_high, rhs_reg);
          __ Sll(dst_high, lhs_low, 1);
          __ Sllv(dst_high, dst_high, AT);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(TMP, dst_high);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, TMP);
        }
        __ Bind(&done);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2515
// HAdd locations: delegates to the shared binary-op location builder.
void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2519
// HAdd code generation: delegates to the shared binary-op emitter.
void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2523
// HAnd locations: delegates to the shared binary-op location builder.
void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2527
// HAnd code generation: delegates to the shared binary-op emitter.
void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2531
// Sets up locations for an array element load. Object-array loads with read
// barriers enabled may call a slow path and need their output to not alias
// the inputs; Baker read barriers may additionally need a temp register.
void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier
                                                       ? LocationSummary::kCallOnSlowPath
                                                       : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // With the Baker thunks enabled for the relevant access shape the temp
    // is not needed; constant indices follow the field-thunk setting.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2568
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002569static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2570 auto null_checker = [codegen, instruction]() {
2571 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002572 };
2573 return null_checker;
2574}
2575
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002576void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
2577 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002578 Location obj_loc = locations->InAt(0);
2579 Register obj = obj_loc.AsRegister<Register>();
2580 Location out_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002581 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002582 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002583 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002584
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002585 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002586 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2587 instruction->IsStringCharAt();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002588 switch (type) {
2589 case Primitive::kPrimBoolean: {
Alexey Frunze15958152017-02-09 19:08:30 -08002590 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002591 if (index.IsConstant()) {
2592 size_t offset =
2593 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002594 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002595 } else {
2596 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002597 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002598 }
2599 break;
2600 }
2601
2602 case Primitive::kPrimByte: {
Alexey Frunze15958152017-02-09 19:08:30 -08002603 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002604 if (index.IsConstant()) {
2605 size_t offset =
2606 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002607 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002608 } else {
2609 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002610 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002611 }
2612 break;
2613 }
2614
2615 case Primitive::kPrimShort: {
Alexey Frunze15958152017-02-09 19:08:30 -08002616 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002617 if (index.IsConstant()) {
2618 size_t offset =
2619 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002620 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002621 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002622 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002623 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002624 }
2625 break;
2626 }
2627
2628 case Primitive::kPrimChar: {
Alexey Frunze15958152017-02-09 19:08:30 -08002629 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002630 if (maybe_compressed_char_at) {
2631 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2632 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
2633 __ Sll(TMP, TMP, 31); // Extract compression flag into the most significant bit of TMP.
2634 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2635 "Expecting 0=compressed, 1=uncompressed");
2636 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002637 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002638 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2639 if (maybe_compressed_char_at) {
2640 MipsLabel uncompressed_load, done;
2641 __ Bnez(TMP, &uncompressed_load);
2642 __ LoadFromOffset(kLoadUnsignedByte,
2643 out,
2644 obj,
2645 data_offset + (const_index << TIMES_1));
2646 __ B(&done);
2647 __ Bind(&uncompressed_load);
2648 __ LoadFromOffset(kLoadUnsignedHalfword,
2649 out,
2650 obj,
2651 data_offset + (const_index << TIMES_2));
2652 __ Bind(&done);
2653 } else {
2654 __ LoadFromOffset(kLoadUnsignedHalfword,
2655 out,
2656 obj,
2657 data_offset + (const_index << TIMES_2),
2658 null_checker);
2659 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002660 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002661 Register index_reg = index.AsRegister<Register>();
2662 if (maybe_compressed_char_at) {
2663 MipsLabel uncompressed_load, done;
2664 __ Bnez(TMP, &uncompressed_load);
2665 __ Addu(TMP, obj, index_reg);
2666 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2667 __ B(&done);
2668 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002669 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002670 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2671 __ Bind(&done);
2672 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002673 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002674 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
2675 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002676 }
2677 break;
2678 }
2679
Alexey Frunze15958152017-02-09 19:08:30 -08002680 case Primitive::kPrimInt: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002681 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002682 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002683 if (index.IsConstant()) {
2684 size_t offset =
2685 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002686 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002687 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002688 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002689 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002690 }
2691 break;
2692 }
2693
Alexey Frunze15958152017-02-09 19:08:30 -08002694 case Primitive::kPrimNot: {
2695 static_assert(
2696 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2697 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2698 // /* HeapReference<Object> */ out =
2699 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2700 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002701 bool temp_needed = index.IsConstant()
2702 ? !kBakerReadBarrierThunksEnableForFields
2703 : !kBakerReadBarrierThunksEnableForArrays;
2704 Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze15958152017-02-09 19:08:30 -08002705 // Note that a potential implicit null check is handled in this
2706 // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002707 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
2708 if (index.IsConstant()) {
2709 // Array load with a constant index can be treated as a field load.
2710 size_t offset =
2711 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2712 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2713 out_loc,
2714 obj,
2715 offset,
2716 temp,
2717 /* needs_null_check */ false);
2718 } else {
2719 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2720 out_loc,
2721 obj,
2722 data_offset,
2723 index,
2724 temp,
2725 /* needs_null_check */ false);
2726 }
Alexey Frunze15958152017-02-09 19:08:30 -08002727 } else {
2728 Register out = out_loc.AsRegister<Register>();
2729 if (index.IsConstant()) {
2730 size_t offset =
2731 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2732 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
2733 // If read barriers are enabled, emit read barriers other than
2734 // Baker's using a slow path (and also unpoison the loaded
2735 // reference, if heap poisoning is enabled).
2736 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2737 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002738 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08002739 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
2740 // If read barriers are enabled, emit read barriers other than
2741 // Baker's using a slow path (and also unpoison the loaded
2742 // reference, if heap poisoning is enabled).
2743 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2744 out_loc,
2745 out_loc,
2746 obj_loc,
2747 data_offset,
2748 index);
2749 }
2750 }
2751 break;
2752 }
2753
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002754 case Primitive::kPrimLong: {
Alexey Frunze15958152017-02-09 19:08:30 -08002755 Register out = out_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002756 if (index.IsConstant()) {
2757 size_t offset =
2758 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002759 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002760 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002761 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002762 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002763 }
2764 break;
2765 }
2766
2767 case Primitive::kPrimFloat: {
Alexey Frunze15958152017-02-09 19:08:30 -08002768 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002769 if (index.IsConstant()) {
2770 size_t offset =
2771 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002772 __ LoadSFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002773 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002774 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002775 __ LoadSFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002776 }
2777 break;
2778 }
2779
2780 case Primitive::kPrimDouble: {
Alexey Frunze15958152017-02-09 19:08:30 -08002781 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002782 if (index.IsConstant()) {
2783 size_t offset =
2784 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002785 __ LoadDFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002786 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002787 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002788 __ LoadDFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002789 }
2790 break;
2791 }
2792
2793 case Primitive::kPrimVoid:
2794 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2795 UNREACHABLE();
2796 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002797}
2798
2799void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
2800 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2801 locations->SetInAt(0, Location::RequiresRegister());
2802 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2803}
2804
2805void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
2806 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002807 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002808 Register obj = locations->InAt(0).AsRegister<Register>();
2809 Register out = locations->Out().AsRegister<Register>();
2810 __ LoadFromOffset(kLoadWord, out, obj, offset);
2811 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002812 // Mask out compression flag from String's array length.
2813 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2814 __ Srl(out, out, 1u);
2815 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002816}
2817
Alexey Frunzef58b2482016-09-02 22:14:06 -07002818Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2819 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2820 ? Location::ConstantLocation(instruction->AsConstant())
2821 : Location::RequiresRegister();
2822}
2823
2824Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2825 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2826 // We can store a non-zero float or double constant without first loading it into the FPU,
2827 // but we should only prefer this if the constant has a single use.
2828 if (instruction->IsConstant() &&
2829 (instruction->AsConstant()->IsZeroBitPattern() ||
2830 instruction->GetUses().HasExactlyOneElement())) {
2831 return Location::ConstantLocation(instruction->AsConstant());
2832 // Otherwise fall through and require an FPU register for the constant.
2833 }
2834 return Location::RequiresFpuRegister();
2835}
2836
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002837void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002838 Primitive::Type value_type = instruction->GetComponentType();
2839
2840 bool needs_write_barrier =
2841 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2842 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2843
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002844 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2845 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002846 may_need_runtime_call_for_type_check ?
2847 LocationSummary::kCallOnSlowPath :
2848 LocationSummary::kNoCall);
2849
2850 locations->SetInAt(0, Location::RequiresRegister());
2851 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2852 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2853 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002854 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002855 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2856 }
2857 if (needs_write_barrier) {
2858 // Temporary register for the write barrier.
2859 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002860 }
2861}
2862
// Generates code for an array element store. Computes the element address
// (constant index folded into the offset, variable index added to the base),
// then stores the value; reference stores additionally perform an optional
// runtime type check and GC card marking.
void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the store addresses `obj + folded offset` directly;
  // otherwise TMP holds the computed element base address.
  Register base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        // Byte elements need no index scaling; a plain add suffices.
        __ Addu(base_reg, obj, index.AsRegister<Register>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // base_reg = obj + (index << 1).
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        // base_reg = obj + (index << 2).
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegister<Register>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (value_location.IsConstant()) {
        // Just setting null.
        // A constant reference value must be null (checked below), so no
        // type check and no write barrier are needed.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      Register value = value_location.AsRegister<Register>();
      Register temp1 = locations->GetTemp(0).AsRegister<Register>();
      Register temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      MipsLabel done;
      SlowPathCodeMIPS* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null never needs a type check; store it directly and
          // skip over the check.
          MipsLabel non_zero;
          __ Bnez(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] it is also sufficient for the value's class to
          // derive directly from Object; check the superclass as well.
          MipsLabel do_put;
          __ Beq(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnez(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bne(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      Register source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // On the fast path the store above is the first access that may
        // fault, so record it as the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // Mark the GC card so the collector notices the reference store.
      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        // base_reg = obj + (index << 3).
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        Register value = value_location.AsRegisterPairLow<Register>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
      }
      if (value_location.IsConstant()) {
        // Constant floats are stored from a GPR, avoiding an FPU round trip.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreSToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
      }
      if (value_location.IsConstant()) {
        // Constant doubles are stored from GPRs, avoiding an FPU round trip.
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FRegister value = value_location.AsFpuRegister<FRegister>();
        __ StoreDToOffset(value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
3100
3101void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003102 RegisterSet caller_saves = RegisterSet::Empty();
3103 InvokeRuntimeCallingConvention calling_convention;
3104 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3105 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3106 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003107 locations->SetInAt(0, Location::RequiresRegister());
3108 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003109}
3110
3111void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
3112 LocationSummary* locations = instruction->GetLocations();
3113 BoundsCheckSlowPathMIPS* slow_path =
3114 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
3115 codegen_->AddSlowPath(slow_path);
3116
3117 Register index = locations->InAt(0).AsRegister<Register>();
3118 Register length = locations->InAt(1).AsRegister<Register>();
3119
3120 // length is limited by the maximum positive signed 32-bit integer.
3121 // Unsigned comparison of length and index checks for index < 0
3122 // and for length <= index simultaneously.
3123 __ Bgeu(index, length, slow_path->GetEntryLabel());
3124}
3125
Alexey Frunze15958152017-02-09 19:08:30 -08003126// Temp is used for read barrier.
3127static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3128 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07003129 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08003130 (kUseBakerReadBarrier ||
3131 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3132 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3133 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3134 return 1;
3135 }
3136 return 0;
3137}
3138
3139// Extra temp is used for read barrier.
3140static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3141 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3142}
3143
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003144void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003145 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3146 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3147
3148 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3149 switch (type_check_kind) {
3150 case TypeCheckKind::kExactCheck:
3151 case TypeCheckKind::kAbstractClassCheck:
3152 case TypeCheckKind::kClassHierarchyCheck:
3153 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003154 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003155 ? LocationSummary::kCallOnSlowPath
3156 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3157 break;
3158 case TypeCheckKind::kArrayCheck:
3159 case TypeCheckKind::kUnresolvedCheck:
3160 case TypeCheckKind::kInterfaceCheck:
3161 call_kind = LocationSummary::kCallOnSlowPath;
3162 break;
3163 }
3164
3165 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003166 locations->SetInAt(0, Location::RequiresRegister());
3167 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003168 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003169}
3170
3171void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003172 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003173 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08003174 Location obj_loc = locations->InAt(0);
3175 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003176 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08003177 Location temp_loc = locations->GetTemp(0);
3178 Register temp = temp_loc.AsRegister<Register>();
3179 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3180 DCHECK_LE(num_temps, 2u);
3181 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003182 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3183 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3184 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3185 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3186 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3187 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3188 const uint32_t object_array_data_offset =
3189 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
3190 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003191
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003192 // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
3193 // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
3194 // read barriers is done for performance and code size reasons.
3195 bool is_type_check_slow_path_fatal = false;
3196 if (!kEmitCompilerReadBarrier) {
3197 is_type_check_slow_path_fatal =
3198 (type_check_kind == TypeCheckKind::kExactCheck ||
3199 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3200 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3201 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3202 !instruction->CanThrowIntoCatchBlock();
3203 }
3204 SlowPathCodeMIPS* slow_path =
3205 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
3206 is_type_check_slow_path_fatal);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003207 codegen_->AddSlowPath(slow_path);
3208
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003209 // Avoid this check if we know `obj` is not null.
3210 if (instruction->MustDoNullCheck()) {
3211 __ Beqz(obj, &done);
3212 }
3213
3214 switch (type_check_kind) {
3215 case TypeCheckKind::kExactCheck:
3216 case TypeCheckKind::kArrayCheck: {
3217 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003218 GenerateReferenceLoadTwoRegisters(instruction,
3219 temp_loc,
3220 obj_loc,
3221 class_offset,
3222 maybe_temp2_loc,
3223 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003224 // Jump to slow path for throwing the exception or doing a
3225 // more involved array check.
3226 __ Bne(temp, cls, slow_path->GetEntryLabel());
3227 break;
3228 }
3229
3230 case TypeCheckKind::kAbstractClassCheck: {
3231 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003232 GenerateReferenceLoadTwoRegisters(instruction,
3233 temp_loc,
3234 obj_loc,
3235 class_offset,
3236 maybe_temp2_loc,
3237 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003238 // If the class is abstract, we eagerly fetch the super class of the
3239 // object to avoid doing a comparison we know will fail.
3240 MipsLabel loop;
3241 __ Bind(&loop);
3242 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003243 GenerateReferenceLoadOneRegister(instruction,
3244 temp_loc,
3245 super_offset,
3246 maybe_temp2_loc,
3247 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003248 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3249 // exception.
3250 __ Beqz(temp, slow_path->GetEntryLabel());
3251 // Otherwise, compare the classes.
3252 __ Bne(temp, cls, &loop);
3253 break;
3254 }
3255
3256 case TypeCheckKind::kClassHierarchyCheck: {
3257 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003258 GenerateReferenceLoadTwoRegisters(instruction,
3259 temp_loc,
3260 obj_loc,
3261 class_offset,
3262 maybe_temp2_loc,
3263 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003264 // Walk over the class hierarchy to find a match.
3265 MipsLabel loop;
3266 __ Bind(&loop);
3267 __ Beq(temp, cls, &done);
3268 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003269 GenerateReferenceLoadOneRegister(instruction,
3270 temp_loc,
3271 super_offset,
3272 maybe_temp2_loc,
3273 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003274 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3275 // exception. Otherwise, jump to the beginning of the loop.
3276 __ Bnez(temp, &loop);
3277 __ B(slow_path->GetEntryLabel());
3278 break;
3279 }
3280
3281 case TypeCheckKind::kArrayObjectCheck: {
3282 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003283 GenerateReferenceLoadTwoRegisters(instruction,
3284 temp_loc,
3285 obj_loc,
3286 class_offset,
3287 maybe_temp2_loc,
3288 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003289 // Do an exact check.
3290 __ Beq(temp, cls, &done);
3291 // Otherwise, we need to check that the object's class is a non-primitive array.
3292 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003293 GenerateReferenceLoadOneRegister(instruction,
3294 temp_loc,
3295 component_offset,
3296 maybe_temp2_loc,
3297 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003298 // If the component type is null, jump to the slow path to throw the exception.
3299 __ Beqz(temp, slow_path->GetEntryLabel());
3300 // Otherwise, the object is indeed an array, further check that this component
3301 // type is not a primitive type.
3302 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3303 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3304 __ Bnez(temp, slow_path->GetEntryLabel());
3305 break;
3306 }
3307
3308 case TypeCheckKind::kUnresolvedCheck:
3309 // We always go into the type check slow path for the unresolved check case.
3310 // We cannot directly call the CheckCast runtime entry point
3311 // without resorting to a type checking slow path here (i.e. by
3312 // calling InvokeRuntime directly), as it would require to
3313 // assign fixed registers for the inputs of this HInstanceOf
3314 // instruction (following the runtime calling convention), which
3315 // might be cluttered by the potential first read barrier
3316 // emission at the beginning of this method.
3317 __ B(slow_path->GetEntryLabel());
3318 break;
3319
3320 case TypeCheckKind::kInterfaceCheck: {
3321 // Avoid read barriers to improve performance of the fast path. We can not get false
3322 // positives by doing this.
3323 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003324 GenerateReferenceLoadTwoRegisters(instruction,
3325 temp_loc,
3326 obj_loc,
3327 class_offset,
3328 maybe_temp2_loc,
3329 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003330 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003331 GenerateReferenceLoadTwoRegisters(instruction,
3332 temp_loc,
3333 temp_loc,
3334 iftable_offset,
3335 maybe_temp2_loc,
3336 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003337 // Iftable is never null.
3338 __ Lw(TMP, temp, array_length_offset);
3339 // Loop through the iftable and check if any class matches.
3340 MipsLabel loop;
3341 __ Bind(&loop);
3342 __ Addiu(temp, temp, 2 * kHeapReferenceSize); // Possibly in delay slot on R2.
3343 __ Beqz(TMP, slow_path->GetEntryLabel());
3344 __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
3345 __ MaybeUnpoisonHeapReference(AT);
3346 // Go to next interface.
3347 __ Addiu(TMP, TMP, -2);
3348 // Compare the classes and continue the loop if they do not match.
3349 __ Bne(AT, cls, &loop);
3350 break;
3351 }
3352 }
3353
3354 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003355 __ Bind(slow_path->GetExitLabel());
3356}
3357
3358void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3359 LocationSummary* locations =
3360 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3361 locations->SetInAt(0, Location::RequiresRegister());
3362 if (check->HasUses()) {
3363 locations->SetOut(Location::SameAsFirstInput());
3364 }
3365}
3366
3367void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
3368 // We assume the class is not null.
3369 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
3370 check->GetLoadClass(),
3371 check,
3372 check->GetDexPc(),
3373 true);
3374 codegen_->AddSlowPath(slow_path);
3375 GenerateClassInitializationCheck(slow_path,
3376 check->GetLocations()->InAt(0).AsRegister<Register>());
3377}
3378
3379void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
3380 Primitive::Type in_type = compare->InputAt(0)->GetType();
3381
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003382 LocationSummary* locations =
3383 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003384
3385 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003386 case Primitive::kPrimBoolean:
3387 case Primitive::kPrimByte:
3388 case Primitive::kPrimShort:
3389 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003390 case Primitive::kPrimInt:
Alexey Frunzee7697712016-09-15 21:37:49 -07003391 locations->SetInAt(0, Location::RequiresRegister());
3392 locations->SetInAt(1, Location::RequiresRegister());
3393 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3394 break;
3395
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003396 case Primitive::kPrimLong:
3397 locations->SetInAt(0, Location::RequiresRegister());
3398 locations->SetInAt(1, Location::RequiresRegister());
3399 // Output overlaps because it is written before doing the low comparison.
3400 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3401 break;
3402
3403 case Primitive::kPrimFloat:
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003404 case Primitive::kPrimDouble:
3405 locations->SetInAt(0, Location::RequiresFpuRegister());
3406 locations->SetInAt(1, Location::RequiresFpuRegister());
3407 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003408 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003409
3410 default:
3411 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3412 }
3413}
3414
// Emits a three-way comparison producing -1, 0 or 1 in a core register.
// Integral comparisons use slt/sltu pairs; FP comparisons use either the R6
// compare instructions (CmpEqS/CmpLtS, result in FTMP) or the pre-R6 condition
// code 0 (c.lt.s/c.eq.s + bc1t/movt).
void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register res = locations->Out().AsRegister<Register>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // Result convention:
  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();
      // res = (rhs < lhs) - (lhs < rhs), i.e. -1/0/1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }
    case Primitive::kPrimLong: {
      MipsLabel done;
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
      // Compare the (signed) high words first; only if they are equal does the
      // (unsigned) comparison of the low words decide the result.
      // TODO: more efficient (direct) comparison with a constant.
      __ Slt(TMP, lhs_high, rhs_high);
      __ Slt(AT, rhs_high, lhs_high);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bnez(res, &done);  // If we compared ==, check if lower bits are also equal.
      __ Sltu(TMP, lhs_low, rhs_low);
      __ Sltu(AT, rhs_low, lhs_low);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimFloat: {
      // gt_bias selects which direction an unordered (NaN) comparison biases to.
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        __ CmpEqS(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          // Not equal and not less-than (includes NaN): result is 1.
          __ LoadConst32(res, 1);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          // Not equal and not greater-than (includes NaN): result is -1.
          __ LoadConst32(res, -1);
        }
      } else {
        // Pre-R6: use FP condition code 0 with bc1t/movt.
        if (gt_bias) {
          __ ColtS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);  // Zero the result when the operands are equal.
        } else {
          __ ColtS(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);  // Zero the result when the operands are equal.
        }
      }
      __ Bind(&done);
      break;
    }
    case Primitive::kPrimDouble: {
      // Same scheme as kPrimFloat, using the double-precision instructions.
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        __ CmpEqD(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        if (gt_bias) {
          __ ColtD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);
        } else {
          __ ColtD(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);
        }
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3540
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003541void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003542 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003543 switch (instruction->InputAt(0)->GetType()) {
3544 default:
3545 case Primitive::kPrimLong:
3546 locations->SetInAt(0, Location::RequiresRegister());
3547 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3548 break;
3549
3550 case Primitive::kPrimFloat:
3551 case Primitive::kPrimDouble:
3552 locations->SetInAt(0, Location::RequiresFpuRegister());
3553 locations->SetInAt(1, Location::RequiresFpuRegister());
3554 break;
3555 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003556 if (!instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003557 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3558 }
3559}
3560
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003561void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003562 if (instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003563 return;
3564 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003565
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003566 Primitive::Type type = instruction->InputAt(0)->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003567 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003568
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003569 switch (type) {
3570 default:
3571 // Integer case.
3572 GenerateIntCompare(instruction->GetCondition(), locations);
3573 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003574
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003575 case Primitive::kPrimLong:
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01003576 GenerateLongCompare(instruction->GetCondition(), locations);
3577 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003578
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003579 case Primitive::kPrimFloat:
3580 case Primitive::kPrimDouble:
Alexey Frunze2ddb7172016-09-06 17:04:55 -07003581 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3582 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003583 }
3584}
3585
Alexey Frunze7e99e052015-11-24 19:28:01 -08003586void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3587 DCHECK(instruction->IsDiv() || instruction->IsRem());
3588 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3589
3590 LocationSummary* locations = instruction->GetLocations();
3591 Location second = locations->InAt(1);
3592 DCHECK(second.IsConstant());
3593
3594 Register out = locations->Out().AsRegister<Register>();
3595 Register dividend = locations->InAt(0).AsRegister<Register>();
3596 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3597 DCHECK(imm == 1 || imm == -1);
3598
3599 if (instruction->IsRem()) {
3600 __ Move(out, ZERO);
3601 } else {
3602 if (imm == -1) {
3603 __ Subu(out, ZERO, dividend);
3604 } else if (out != dividend) {
3605 __ Move(out, dividend);
3606 }
3607 }
3608}
3609
// Emits division/remainder by a power-of-two constant (|imm| == 2^ctz_imm)
// without using a divide instruction. Signed division by 2^k is implemented by
// biasing negative dividends with (2^k - 1) before the arithmetic shift so the
// result rounds toward zero; the bias is built from the sign via Sra/Srl.
void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  // AbsOrMin maps INT32_MIN to itself; the cast makes 2^31 representable.
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (ctz_imm == 1) {
      // Fast path for division by +/-2, which is very common.
      // The bias is just the sign bit itself.
      __ Srl(TMP, dividend, 31);
    } else {
      // TMP = (dividend < 0) ? (2^ctz_imm - 1) : 0.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
    }
    __ Addu(out, dividend, TMP);
    __ Sra(out, out, ctz_imm);
    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ Subu(out, ZERO, out);
    }
  } else {
    if (ctz_imm == 1) {
      // Fast path for modulo +/-2, which is very common.
      __ Sra(TMP, dividend, 31);
      __ Subu(out, dividend, TMP);
      __ Andi(out, out, 1);
      __ Addu(out, out, TMP);
    } else {
      // Bias, mask off the low ctz_imm bits, then remove the bias again.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
      __ Addu(out, dividend, TMP);
      if (IsUint<16>(abs_imm - 1)) {
        // Mask fits in Andi's 16-bit zero-extended immediate.
        __ Andi(out, out, abs_imm - 1);
      } else {
        // Otherwise clear the high bits with a shift pair.
        __ Sll(out, out, 32 - ctz_imm);
        __ Srl(out, out, 32 - ctz_imm);
      }
      __ Subu(out, out, TMP);
    }
  }
}
3658
// Emits division/remainder by an arbitrary non-trivial constant using the
// "magic number" multiplication technique (see Hacker's Delight, ch. 10):
// the quotient is derived from the high 32 bits of dividend * magic, with
// sign corrections, and the remainder is recovered as dividend - q * imm.
void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // TMP = high 32 bits of (dividend * magic).
  __ LoadConst32(TMP, magic);
  if (isR6) {
    __ MuhR6(TMP, dividend, TMP);
  } else {
    __ MultR2(dividend, TMP);
    __ Mfhi(TMP);
  }
  // Correct for the sign of the magic constant relative to the divisor.
  if (imm > 0 && magic < 0) {
    __ Addu(TMP, TMP, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Subu(TMP, TMP, dividend);
  }

  if (shift != 0) {
    __ Sra(TMP, TMP, shift);
  }

  if (instruction->IsDiv()) {
    // quotient = TMP - (TMP >> 31), i.e. add 1 when TMP is negative so the
    // result rounds toward zero.
    __ Sra(out, TMP, 31);
    __ Subu(out, TMP, out);
  } else {
    // Compute the quotient into AT the same way, then out = dividend - q * imm.
    __ Sra(AT, TMP, 31);
    __ Subu(AT, TMP, AT);
    __ LoadConst32(TMP, imm);
    if (isR6) {
      __ MulR6(TMP, AT, TMP);
    } else {
      __ MulR2(TMP, AT, TMP);
    }
    __ Subu(out, dividend, TMP);
  }
}
3709
3710void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3711 DCHECK(instruction->IsDiv() || instruction->IsRem());
3712 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3713
3714 LocationSummary* locations = instruction->GetLocations();
3715 Register out = locations->Out().AsRegister<Register>();
3716 Location second = locations->InAt(1);
3717
3718 if (second.IsConstant()) {
3719 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3720 if (imm == 0) {
3721 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3722 } else if (imm == 1 || imm == -1) {
3723 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003724 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003725 DivRemByPowerOfTwo(instruction);
3726 } else {
3727 DCHECK(imm <= -2 || imm >= 2);
3728 GenerateDivRemWithAnyConstant(instruction);
3729 }
3730 } else {
3731 Register dividend = locations->InAt(0).AsRegister<Register>();
3732 Register divisor = second.AsRegister<Register>();
3733 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3734 if (instruction->IsDiv()) {
3735 if (isR6) {
3736 __ DivR6(out, dividend, divisor);
3737 } else {
3738 __ DivR2(out, dividend, divisor);
3739 }
3740 } else {
3741 if (isR6) {
3742 __ ModR6(out, dividend, divisor);
3743 } else {
3744 __ ModR2(out, dividend, divisor);
3745 }
3746 }
3747 }
3748}
3749
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003750void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
3751 Primitive::Type type = div->GetResultType();
3752 LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003753 ? LocationSummary::kCallOnMainOnly
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003754 : LocationSummary::kNoCall;
3755
3756 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3757
3758 switch (type) {
3759 case Primitive::kPrimInt:
3760 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08003761 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003762 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3763 break;
3764
3765 case Primitive::kPrimLong: {
3766 InvokeRuntimeCallingConvention calling_convention;
3767 locations->SetInAt(0, Location::RegisterPairLocation(
3768 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3769 locations->SetInAt(1, Location::RegisterPairLocation(
3770 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3771 locations->SetOut(calling_convention.GetReturnLocation(type));
3772 break;
3773 }
3774
3775 case Primitive::kPrimFloat:
3776 case Primitive::kPrimDouble:
3777 locations->SetInAt(0, Location::RequiresFpuRegister());
3778 locations->SetInAt(1, Location::RequiresFpuRegister());
3779 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3780 break;
3781
3782 default:
3783 LOG(FATAL) << "Unexpected div type " << type;
3784 }
3785}
3786
3787void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
3788 Primitive::Type type = instruction->GetType();
3789 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003790
3791 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08003792 case Primitive::kPrimInt:
3793 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003794 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003795 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01003796 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003797 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
3798 break;
3799 }
3800 case Primitive::kPrimFloat:
3801 case Primitive::kPrimDouble: {
3802 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
3803 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3804 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3805 if (type == Primitive::kPrimFloat) {
3806 __ DivS(dst, lhs, rhs);
3807 } else {
3808 __ DivD(dst, lhs, rhs);
3809 }
3810 break;
3811 }
3812 default:
3813 LOG(FATAL) << "Unexpected div type " << type;
3814 }
3815}
3816
3817void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003818 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003819 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003820}
3821
3822void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3823 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
3824 codegen_->AddSlowPath(slow_path);
3825 Location value = instruction->GetLocations()->InAt(0);
3826 Primitive::Type type = instruction->GetType();
3827
3828 switch (type) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003829 case Primitive::kPrimBoolean:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003830 case Primitive::kPrimByte:
3831 case Primitive::kPrimChar:
3832 case Primitive::kPrimShort:
3833 case Primitive::kPrimInt: {
3834 if (value.IsConstant()) {
3835 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3836 __ B(slow_path->GetEntryLabel());
3837 } else {
3838 // A division by a non-null constant is valid. We don't need to perform
3839 // any check, so simply fall through.
3840 }
3841 } else {
3842 DCHECK(value.IsRegister()) << value;
3843 __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
3844 }
3845 break;
3846 }
3847 case Primitive::kPrimLong: {
3848 if (value.IsConstant()) {
3849 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3850 __ B(slow_path->GetEntryLabel());
3851 } else {
3852 // A division by a non-null constant is valid. We don't need to perform
3853 // any check, so simply fall through.
3854 }
3855 } else {
3856 DCHECK(value.IsRegisterPair()) << value;
3857 __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
3858 __ Beqz(TMP, slow_path->GetEntryLabel());
3859 }
3860 break;
3861 }
3862 default:
3863 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
3864 }
3865}
3866
3867void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
3868 LocationSummary* locations =
3869 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3870 locations->SetOut(Location::ConstantLocation(constant));
3871}
3872
// No code is emitted for the constant itself.
void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3876
// The exit instruction consumes nothing and produces nothing: no locations.
void LocationsBuilderMIPS::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3880
// No code is emitted for the exit block.
void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
3883
3884void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
3885 LocationSummary* locations =
3886 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3887 locations->SetOut(Location::ConstantLocation(constant));
3888}
3889
// No code is emitted for the constant itself.
void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3893
// An unconditional branch has no operands and needs no locations.
void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
3897
3898void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
3899 DCHECK(!successor->IsExitBlock());
3900 HBasicBlock* block = got->GetBlock();
3901 HInstruction* previous = got->GetPrevious();
3902 HLoopInformation* info = block->GetLoopInformation();
3903
3904 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3905 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3906 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3907 return;
3908 }
3909 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3910 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3911 }
3912 if (!codegen_->GoesToNextBlock(block, successor)) {
3913 __ B(codegen_->GetLabelOf(successor));
3914 }
3915}
3916
// Delegates to HandleGoto with the goto's single successor.
void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
3920
// A try boundary has no operands and needs no locations.
void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3924
// Branches to the try boundary's normal-flow successor, unless that successor
// is the exit block, in which case no branch is needed.
void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
3931
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003932void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
3933 LocationSummary* locations) {
3934 Register dst = locations->Out().AsRegister<Register>();
3935 Register lhs = locations->InAt(0).AsRegister<Register>();
3936 Location rhs_location = locations->InAt(1);
3937 Register rhs_reg = ZERO;
3938 int64_t rhs_imm = 0;
3939 bool use_imm = rhs_location.IsConstant();
3940 if (use_imm) {
3941 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3942 } else {
3943 rhs_reg = rhs_location.AsRegister<Register>();
3944 }
3945
3946 switch (cond) {
3947 case kCondEQ:
3948 case kCondNE:
Alexey Frunzee7697712016-09-15 21:37:49 -07003949 if (use_imm && IsInt<16>(-rhs_imm)) {
3950 if (rhs_imm == 0) {
3951 if (cond == kCondEQ) {
3952 __ Sltiu(dst, lhs, 1);
3953 } else {
3954 __ Sltu(dst, ZERO, lhs);
3955 }
3956 } else {
3957 __ Addiu(dst, lhs, -rhs_imm);
3958 if (cond == kCondEQ) {
3959 __ Sltiu(dst, dst, 1);
3960 } else {
3961 __ Sltu(dst, ZERO, dst);
3962 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003963 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003964 } else {
Alexey Frunzee7697712016-09-15 21:37:49 -07003965 if (use_imm && IsUint<16>(rhs_imm)) {
3966 __ Xori(dst, lhs, rhs_imm);
3967 } else {
3968 if (use_imm) {
3969 rhs_reg = TMP;
3970 __ LoadConst32(rhs_reg, rhs_imm);
3971 }
3972 __ Xor(dst, lhs, rhs_reg);
3973 }
3974 if (cond == kCondEQ) {
3975 __ Sltiu(dst, dst, 1);
3976 } else {
3977 __ Sltu(dst, ZERO, dst);
3978 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003979 }
3980 break;
3981
3982 case kCondLT:
3983 case kCondGE:
3984 if (use_imm && IsInt<16>(rhs_imm)) {
3985 __ Slti(dst, lhs, rhs_imm);
3986 } else {
3987 if (use_imm) {
3988 rhs_reg = TMP;
3989 __ LoadConst32(rhs_reg, rhs_imm);
3990 }
3991 __ Slt(dst, lhs, rhs_reg);
3992 }
3993 if (cond == kCondGE) {
3994 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3995 // only the slt instruction but no sge.
3996 __ Xori(dst, dst, 1);
3997 }
3998 break;
3999
4000 case kCondLE:
4001 case kCondGT:
4002 if (use_imm && IsInt<16>(rhs_imm + 1)) {
4003 // Simulate lhs <= rhs via lhs < rhs + 1.
4004 __ Slti(dst, lhs, rhs_imm + 1);
4005 if (cond == kCondGT) {
4006 // Simulate lhs > rhs via !(lhs <= rhs) since there's
4007 // only the slti instruction but no sgti.
4008 __ Xori(dst, dst, 1);
4009 }
4010 } else {
4011 if (use_imm) {
4012 rhs_reg = TMP;
4013 __ LoadConst32(rhs_reg, rhs_imm);
4014 }
4015 __ Slt(dst, rhs_reg, lhs);
4016 if (cond == kCondLE) {
4017 // Simulate lhs <= rhs via !(rhs < lhs) since there's
4018 // only the slt instruction but no sle.
4019 __ Xori(dst, dst, 1);
4020 }
4021 }
4022 break;
4023
4024 case kCondB:
4025 case kCondAE:
4026 if (use_imm && IsInt<16>(rhs_imm)) {
4027 // Sltiu sign-extends its 16-bit immediate operand before
4028 // the comparison and thus lets us compare directly with
4029 // unsigned values in the ranges [0, 0x7fff] and
4030 // [0xffff8000, 0xffffffff].
4031 __ Sltiu(dst, lhs, rhs_imm);
4032 } else {
4033 if (use_imm) {
4034 rhs_reg = TMP;
4035 __ LoadConst32(rhs_reg, rhs_imm);
4036 }
4037 __ Sltu(dst, lhs, rhs_reg);
4038 }
4039 if (cond == kCondAE) {
4040 // Simulate lhs >= rhs via !(lhs < rhs) since there's
4041 // only the sltu instruction but no sgeu.
4042 __ Xori(dst, dst, 1);
4043 }
4044 break;
4045
4046 case kCondBE:
4047 case kCondA:
4048 if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4049 // Simulate lhs <= rhs via lhs < rhs + 1.
4050 // Note that this only works if rhs + 1 does not overflow
4051 // to 0, hence the check above.
4052 // Sltiu sign-extends its 16-bit immediate operand before
4053 // the comparison and thus lets us compare directly with
4054 // unsigned values in the ranges [0, 0x7fff] and
4055 // [0xffff8000, 0xffffffff].
4056 __ Sltiu(dst, lhs, rhs_imm + 1);
4057 if (cond == kCondA) {
4058 // Simulate lhs > rhs via !(lhs <= rhs) since there's
4059 // only the sltiu instruction but no sgtiu.
4060 __ Xori(dst, dst, 1);
4061 }
4062 } else {
4063 if (use_imm) {
4064 rhs_reg = TMP;
4065 __ LoadConst32(rhs_reg, rhs_imm);
4066 }
4067 __ Sltu(dst, rhs_reg, lhs);
4068 if (cond == kCondBE) {
4069 // Simulate lhs <= rhs via !(rhs < lhs) since there's
4070 // only the sltu instruction but no sleu.
4071 __ Xori(dst, dst, 1);
4072 }
4073 }
4074 break;
4075 }
4076}
4077
Alexey Frunze674b9ee2016-09-20 14:54:15 -07004078bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
4079 LocationSummary* input_locations,
4080 Register dst) {
4081 Register lhs = input_locations->InAt(0).AsRegister<Register>();
4082 Location rhs_location = input_locations->InAt(1);
4083 Register rhs_reg = ZERO;
4084 int64_t rhs_imm = 0;
4085 bool use_imm = rhs_location.IsConstant();
4086 if (use_imm) {
4087 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
4088 } else {
4089 rhs_reg = rhs_location.AsRegister<Register>();
4090 }
4091
4092 switch (cond) {
4093 case kCondEQ:
4094 case kCondNE:
4095 if (use_imm && IsInt<16>(-rhs_imm)) {
4096 __ Addiu(dst, lhs, -rhs_imm);
4097 } else if (use_imm && IsUint<16>(rhs_imm)) {
4098 __ Xori(dst, lhs, rhs_imm);
4099 } else {
4100 if (use_imm) {
4101 rhs_reg = TMP;
4102 __ LoadConst32(rhs_reg, rhs_imm);
4103 }
4104 __ Xor(dst, lhs, rhs_reg);
4105 }
4106 return (cond == kCondEQ);
4107
4108 case kCondLT:
4109 case kCondGE:
4110 if (use_imm && IsInt<16>(rhs_imm)) {
4111 __ Slti(dst, lhs, rhs_imm);
4112 } else {
4113 if (use_imm) {
4114 rhs_reg = TMP;
4115 __ LoadConst32(rhs_reg, rhs_imm);
4116 }
4117 __ Slt(dst, lhs, rhs_reg);
4118 }
4119 return (cond == kCondGE);
4120
4121 case kCondLE:
4122 case kCondGT:
4123 if (use_imm && IsInt<16>(rhs_imm + 1)) {
4124 // Simulate lhs <= rhs via lhs < rhs + 1.
4125 __ Slti(dst, lhs, rhs_imm + 1);
4126 return (cond == kCondGT);
4127 } else {
4128 if (use_imm) {
4129 rhs_reg = TMP;
4130 __ LoadConst32(rhs_reg, rhs_imm);
4131 }
4132 __ Slt(dst, rhs_reg, lhs);
4133 return (cond == kCondLE);
4134 }
4135
4136 case kCondB:
4137 case kCondAE:
4138 if (use_imm && IsInt<16>(rhs_imm)) {
4139 // Sltiu sign-extends its 16-bit immediate operand before
4140 // the comparison and thus lets us compare directly with
4141 // unsigned values in the ranges [0, 0x7fff] and
4142 // [0xffff8000, 0xffffffff].
4143 __ Sltiu(dst, lhs, rhs_imm);
4144 } else {
4145 if (use_imm) {
4146 rhs_reg = TMP;
4147 __ LoadConst32(rhs_reg, rhs_imm);
4148 }
4149 __ Sltu(dst, lhs, rhs_reg);
4150 }
4151 return (cond == kCondAE);
4152
4153 case kCondBE:
4154 case kCondA:
4155 if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4156 // Simulate lhs <= rhs via lhs < rhs + 1.
4157 // Note that this only works if rhs + 1 does not overflow
4158 // to 0, hence the check above.
4159 // Sltiu sign-extends its 16-bit immediate operand before
4160 // the comparison and thus lets us compare directly with
4161 // unsigned values in the ranges [0, 0x7fff] and
4162 // [0xffff8000, 0xffffffff].
4163 __ Sltiu(dst, lhs, rhs_imm + 1);
4164 return (cond == kCondA);
4165 } else {
4166 if (use_imm) {
4167 rhs_reg = TMP;
4168 __ LoadConst32(rhs_reg, rhs_imm);
4169 }
4170 __ Sltu(dst, rhs_reg, lhs);
4171 return (cond == kCondBE);
4172 }
4173 }
4174}
4175
// Branches to `label` when the 32-bit integer condition `lhs cond rhs`
// holds; falls through otherwise. The RHS may be a register or a constant.
void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
                                                               LocationSummary* locations,
                                                               MipsLabel* label) {
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  if (use_imm && rhs_imm == 0) {
    // Comparison with zero: use the dedicated compare-with-zero branches.
    // The unsigned conditions degenerate: B ("< 0" unsigned) is always
    // false, AE (">= 0" unsigned) always true, and BE/A reduce to
    // equality/inequality with zero.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqz(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnez(lhs, label);
        break;
      case kCondLT:
        __ Bltz(lhs, label);
        break;
      case kCondGE:
        __ Bgez(lhs, label);
        break;
      case kCondLE:
        __ Blez(lhs, label);
        break;
      case kCondGT:
        __ Bgtz(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else {
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (isR6 || !use_imm) {
      // Generic form: materialize a constant RHS into TMP and use the
      // two-register branch pseudo-instructions.
      if (use_imm) {
        rhs_reg = TMP;
        __ LoadConst32(rhs_reg, rhs_imm);
      }
      switch (cond) {
        case kCondEQ:
          __ Beq(lhs, rhs_reg, label);
          break;
        case kCondNE:
          __ Bne(lhs, rhs_reg, label);
          break;
        case kCondLT:
          __ Blt(lhs, rhs_reg, label);
          break;
        case kCondGE:
          __ Bge(lhs, rhs_reg, label);
          break;
        case kCondLE:
          // lhs <= rhs  <=>  rhs >= lhs.
          __ Bge(rhs_reg, lhs, label);
          break;
        case kCondGT:
          // lhs > rhs  <=>  rhs < lhs.
          __ Blt(rhs_reg, lhs, label);
          break;
        case kCondB:
          __ Bltu(lhs, rhs_reg, label);
          break;
        case kCondAE:
          __ Bgeu(lhs, rhs_reg, label);
          break;
        case kCondBE:
          __ Bgeu(rhs_reg, lhs, label);
          break;
        case kCondA:
          __ Bltu(rhs_reg, lhs, label);
          break;
      }
    } else {
      // Special cases for more efficient comparison with constants on R2.
      // When the constant (or constant + 1) fits in a 16-bit immediate,
      // a single Slti/Sltiu followed by a compare-with-zero branch avoids
      // materializing the full 32-bit constant.
      switch (cond) {
        case kCondEQ:
          __ LoadConst32(TMP, rhs_imm);
          __ Beq(lhs, TMP, label);
          break;
        case kCondNE:
          __ LoadConst32(TMP, rhs_imm);
          __ Bne(lhs, TMP, label);
          break;
        case kCondLT:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(lhs, TMP, label);
          }
          break;
        case kCondGE:
          if (IsInt<16>(rhs_imm)) {
            // lhs >= rhs  <=>  !(lhs < rhs).
            __ Slti(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(lhs, TMP, label);
          }
          break;
        case kCondLE:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(TMP, lhs, label);
          }
          break;
        case kCondGT:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(TMP, lhs, label);
          }
          break;
        case kCondB:
          if (IsInt<16>(rhs_imm)) {
            // Sltiu sign-extends its 16-bit immediate, so this covers
            // unsigned constants in [0, 0x7fff] and [0xffff8000, 0xffffffff].
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(lhs, TMP, label);
          }
          break;
        case kCondAE:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(lhs, TMP, label);
          }
          break;
        case kCondBE:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(TMP, lhs, label);
          }
          break;
        case kCondA:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(TMP, lhs, label);
          }
          break;
      }
    }
  }
}
4352
// Materializes the boolean result (0 or 1) of a 64-bit integer comparison
// into `dst`. The LHS (and a register RHS) live in 32-bit register pairs;
// a constant RHS is split into its high and low 32-bit halves.
//
// General scheme for the ordering conditions:
//   lhs < rhs  (64-bit)  <=>  (hi <) || (hi == && lo <u)
// which is computed branch-free as (hi <) | (!(hi >) & (lo <u)) — the
// "!(hi >) & (lo <u)" term is produced by an Slt of the two partial
// results, since Slt(x, y) with x,y in {0,1} is 1 only when x==0 && y==1.
void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
                                                       LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }
  if (use_imm && imm == 0) {
    // Comparison with zero: the OR of both words tests for equality with
    // zero, the sign bit of the high word decides < 0 / >= 0, and the
    // unsigned conditions B/AE degenerate to constants.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(dst, lhs_high, lhs_low);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(dst, lhs_high, lhs_low);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
        __ Slt(dst, lhs_high, ZERO);
        break;
      case kCondGE:
        __ Slt(dst, lhs_high, ZERO);
        __ Xori(dst, dst, 1);
        break;
      case kCondLE:
        // AT = sign mask (all-ones if negative, else 0);
        // lhs > 0  <=>  sign mask <u (hi | lo); LE is its negation.
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        __ Xori(dst, dst, 1);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        break;
      case kCondB:  // always false
        __ Andi(dst, dst, 0);
        break;
      case kCondAE:  // always true
        __ Ori(dst, ZERO, 1);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    // When dst aliases lhs_low, the unsigned low-word compare must be
    // emitted first, before dst (== lhs_low) is overwritten; otherwise it
    // is emitted after the high-word compares, using dst as a scratch for
    // the low constant.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        // AT = (hi <), TMP = (hi >), dst = (lo <u);
        // result = AT | (!TMP & dst), then inverted for GE.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, lhs_high, TMP);
        __ Slt(TMP, TMP, lhs_high);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        // Same scheme with the operands swapped: computes (rhs < lhs),
        // inverted for LE.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, TMP, lhs_high);
        __ Slt(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        // Unsigned variant: high words are compared with Sltu.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, lhs_high, TMP);
        __ Sltu(TMP, TMP, lhs_high);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, TMP, lhs_high);
        __ Sltu(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  } else {
    // Register RHS: same schemes as above, without constant materialization.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        // TMP = (hi >), AT = (lo <u), then TMP = !(hi >) & (lo <u);
        // result = (hi <) | TMP, inverted for GE.
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  }
}
4567
// Branches to `label` when the 64-bit integer condition holds; falls
// through otherwise. Operands live in 32-bit register pairs; a constant RHS
// is split into its high and low 32-bit halves.
//
// General scheme for the ordering conditions (e.g. LT):
//   1. branch if the high words already decide the outcome (hi <);
//   2. otherwise compute TMP = (hi >) and AT = (lo <u); Blt(TMP, AT)
//      branches only when TMP == 0 && AT == 1, i.e. the high words are
//      equal and the unsigned low-word comparison decides.
// The inverted conditions (GE, LE, AE, BE) branch on the complement:
// branch early on the strict opposite, then Beqz on (hi-strict | lo).
void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
                                                                LocationSummary* locations,
                                                                MipsLabel* label) {
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }

  if (use_imm && imm == 0) {
    // Comparison with zero: the OR of both words tests for zero, the sign
    // bit of the high word decides < 0 / >= 0, and the unsigned conditions
    // B/AE degenerate to constant outcomes.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Bltz(lhs_high, label);
        break;
      case kCondGE:
        __ Bgez(lhs_high, label);
        break;
      case kCondLE:
        // AT = sign mask (all-ones if negative, else 0);
        // lhs <= 0  <=>  sign mask >=u (hi | lo).
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bgeu(AT, TMP, label);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bltu(AT, TMP, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  } else {
    // Register RHS: same two-branch scheme without constant materialization.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  }
}
4780
// Materializes the boolean result (0 or 1) of a floating-point comparison
// into the output register.
//
// `gt_bias` chooses how an unordered comparison (a NaN operand) resolves:
// with gt_bias the ordered compares (c.olt/c.ole, cmp.lt/cmp.le) are used
// for the "less" direction, so NaN makes LT/LE false (and GT/GE true);
// without it the unordered compares (c.ult/c.ule, cmp.ult/cmp.ule) make
// LT/LE true on NaN.
//
// R6 uses CMP.cond.fmt, which writes an all-ones/all-zeros mask into an
// FPU register: Mfc1 + Andi extracts the 0/1 result. For kCondNE the
// Addiu(dst, dst, 1) turns the "equal" mask 0xffffffff into 0 (wraparound)
// and the "not equal" 0 into 1. R2 uses c.cond.fmt on condition flag 0 and
// forms the result with LoadConst32(dst, 1) plus a conditional move
// (Movf clears dst when the flag is false, Movt when it is true).
void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
                                                     bool gt_bias,
                                                     Primitive::Type type,
                                                     LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          // 0xffffffff (equal) + 1 wraps to 0; 0 (not equal) + 1 gives 1.
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          // lhs > rhs is computed as rhs < lhs; the NaN bias flips with
          // the operand swap.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          // Same wraparound trick as the float kCondNE case above.
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  }
}
5004
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005005bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
5006 bool gt_bias,
5007 Primitive::Type type,
5008 LocationSummary* input_locations,
5009 int cc) {
5010 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
5011 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
5012 CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
5013 if (type == Primitive::kPrimFloat) {
5014 switch (cond) {
5015 case kCondEQ:
5016 __ CeqS(cc, lhs, rhs);
5017 return false;
5018 case kCondNE:
5019 __ CeqS(cc, lhs, rhs);
5020 return true;
5021 case kCondLT:
5022 if (gt_bias) {
5023 __ ColtS(cc, lhs, rhs);
5024 } else {
5025 __ CultS(cc, lhs, rhs);
5026 }
5027 return false;
5028 case kCondLE:
5029 if (gt_bias) {
5030 __ ColeS(cc, lhs, rhs);
5031 } else {
5032 __ CuleS(cc, lhs, rhs);
5033 }
5034 return false;
5035 case kCondGT:
5036 if (gt_bias) {
5037 __ CultS(cc, rhs, lhs);
5038 } else {
5039 __ ColtS(cc, rhs, lhs);
5040 }
5041 return false;
5042 case kCondGE:
5043 if (gt_bias) {
5044 __ CuleS(cc, rhs, lhs);
5045 } else {
5046 __ ColeS(cc, rhs, lhs);
5047 }
5048 return false;
5049 default:
5050 LOG(FATAL) << "Unexpected non-floating-point condition";
5051 UNREACHABLE();
5052 }
5053 } else {
5054 DCHECK_EQ(type, Primitive::kPrimDouble);
5055 switch (cond) {
5056 case kCondEQ:
5057 __ CeqD(cc, lhs, rhs);
5058 return false;
5059 case kCondNE:
5060 __ CeqD(cc, lhs, rhs);
5061 return true;
5062 case kCondLT:
5063 if (gt_bias) {
5064 __ ColtD(cc, lhs, rhs);
5065 } else {
5066 __ CultD(cc, lhs, rhs);
5067 }
5068 return false;
5069 case kCondLE:
5070 if (gt_bias) {
5071 __ ColeD(cc, lhs, rhs);
5072 } else {
5073 __ CuleD(cc, lhs, rhs);
5074 }
5075 return false;
5076 case kCondGT:
5077 if (gt_bias) {
5078 __ CultD(cc, rhs, lhs);
5079 } else {
5080 __ ColtD(cc, rhs, lhs);
5081 }
5082 return false;
5083 case kCondGE:
5084 if (gt_bias) {
5085 __ CuleD(cc, rhs, lhs);
5086 } else {
5087 __ ColeD(cc, rhs, lhs);
5088 }
5089 return false;
5090 default:
5091 LOG(FATAL) << "Unexpected non-floating-point condition";
5092 UNREACHABLE();
5093 }
5094 }
5095}
5096
5097bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
5098 bool gt_bias,
5099 Primitive::Type type,
5100 LocationSummary* input_locations,
5101 FRegister dst) {
5102 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
5103 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
5104 CHECK(codegen_->GetInstructionSetFeatures().IsR6());
5105 if (type == Primitive::kPrimFloat) {
5106 switch (cond) {
5107 case kCondEQ:
5108 __ CmpEqS(dst, lhs, rhs);
5109 return false;
5110 case kCondNE:
5111 __ CmpEqS(dst, lhs, rhs);
5112 return true;
5113 case kCondLT:
5114 if (gt_bias) {
5115 __ CmpLtS(dst, lhs, rhs);
5116 } else {
5117 __ CmpUltS(dst, lhs, rhs);
5118 }
5119 return false;
5120 case kCondLE:
5121 if (gt_bias) {
5122 __ CmpLeS(dst, lhs, rhs);
5123 } else {
5124 __ CmpUleS(dst, lhs, rhs);
5125 }
5126 return false;
5127 case kCondGT:
5128 if (gt_bias) {
5129 __ CmpUltS(dst, rhs, lhs);
5130 } else {
5131 __ CmpLtS(dst, rhs, lhs);
5132 }
5133 return false;
5134 case kCondGE:
5135 if (gt_bias) {
5136 __ CmpUleS(dst, rhs, lhs);
5137 } else {
5138 __ CmpLeS(dst, rhs, lhs);
5139 }
5140 return false;
5141 default:
5142 LOG(FATAL) << "Unexpected non-floating-point condition";
5143 UNREACHABLE();
5144 }
5145 } else {
5146 DCHECK_EQ(type, Primitive::kPrimDouble);
5147 switch (cond) {
5148 case kCondEQ:
5149 __ CmpEqD(dst, lhs, rhs);
5150 return false;
5151 case kCondNE:
5152 __ CmpEqD(dst, lhs, rhs);
5153 return true;
5154 case kCondLT:
5155 if (gt_bias) {
5156 __ CmpLtD(dst, lhs, rhs);
5157 } else {
5158 __ CmpUltD(dst, lhs, rhs);
5159 }
5160 return false;
5161 case kCondLE:
5162 if (gt_bias) {
5163 __ CmpLeD(dst, lhs, rhs);
5164 } else {
5165 __ CmpUleD(dst, lhs, rhs);
5166 }
5167 return false;
5168 case kCondGT:
5169 if (gt_bias) {
5170 __ CmpUltD(dst, rhs, lhs);
5171 } else {
5172 __ CmpLtD(dst, rhs, lhs);
5173 }
5174 return false;
5175 case kCondGE:
5176 if (gt_bias) {
5177 __ CmpUleD(dst, rhs, lhs);
5178 } else {
5179 __ CmpLeD(dst, rhs, lhs);
5180 }
5181 return false;
5182 default:
5183 LOG(FATAL) << "Unexpected non-floating-point condition";
5184 UNREACHABLE();
5185 }
5186 }
5187}
5188
// Emits a floating-point compare followed by a conditional branch to `label`.
//
// `cond` must be one of EQ/NE/LT/LE/GT/GE; any other condition is a fatal
// error. `gt_bias` selects how an unordered result (a NaN operand) steers the
// branch: with gt_bias the ordered compare variants are used for LT/LE (NaN
// does not take the branch) and the unordered variants for GT/GE on swapped
// operands (NaN does take the branch); without gt_bias the choice is mirrored.
//
// ISA split (per the R6 feature check below):
//  - R6: CMP.cond.fmt writes an all-ones/all-zeros mask into FTMP and the
//    branch tests that FPU register with BC1NEZ/BC1EQZ.
//  - pre-R6: C.cond.fmt sets FP condition-code flag 0 and the branch tests
//    the flag with BC1T/BC1F.
// GT/GE have no direct compare instruction, so they are emitted as LT/LE
// with `lhs` and `rhs` swapped.
void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
                                                              bool gt_bias,
                                                              Primitive::Type type,
                                                              LocationSummary* locations,
                                                              MipsLabel* label) {
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          // NE is EQ with the branch sense inverted (branch when mask is 0).
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          // GT(lhs, rhs) == LT(rhs, lhs); unordered variant when gt_bias so
          // NaN branches.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      // Pre-R6: compares target FP condition-code flag 0.
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ Bc1f(0, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  } else {
    // Same structure as above, using the double-precision compare forms.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ Bc1f(0, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  }
}
5388
// Emits the branch sequence for `instruction`'s condition input (found at
// `condition_input_index`). Either target label may be nullptr, meaning that
// side falls through to the next block and needs no branch.
void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
                                                         size_t condition_input_index,
                                                         MipsLabel* true_target,
                                                         MipsLabel* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1):
    // emit at most one unconditional branch.
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqz(cond_val.AsRegister<Register>(), false_target);
    } else {
      __ Bnez(cond_val.AsRegister<Register>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    MipsLabel* branch_target = true_target;

    // Case (1): branch to false_target on the opposite condition instead.
    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    // Dispatch on the comparison operand type (int is the default case).
    switch (type) {
      default:
        GenerateIntCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateLongCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}
5464
5465void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5466 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00005467 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005468 locations->SetInAt(0, Location::RequiresRegister());
5469 }
5470}
5471
5472void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00005473 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5474 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5475 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5476 nullptr : codegen_->GetLabelOf(true_successor);
5477 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5478 nullptr : codegen_->GetLabelOf(false_successor);
5479 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005480}
5481
5482void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
5483 LocationSummary* locations = new (GetGraph()->GetArena())
5484 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01005485 InvokeRuntimeCallingConvention calling_convention;
5486 RegisterSet caller_saves = RegisterSet::Empty();
5487 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5488 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00005489 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005490 locations->SetInAt(0, Location::RequiresRegister());
5491 }
5492}
5493
5494void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005495 SlowPathCodeMIPS* slow_path =
5496 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005497 GenerateTestAndBranch(deoptimize,
5498 /* condition_input_index */ 0,
5499 slow_path->GetEntryLabel(),
5500 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005501}
5502
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditonal
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
//
// The decision is a table keyed on (condition type, destination type, R2/R6,
// whether either select input is a zero constant). The pseudo-assembly in the
// comments below documents the sequence GenConditionalMoveR2/R6 would emit for
// each accepted case; cases that would need too many instructions are rejected.
static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  // A zero-constant input lets R6 use a single SELEQZ/SELNEZ (and R2 move
  // from the ZERO register), so it is tracked separately for each input.
  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // A constant condition never uses a conditional move (falls through to the
  // branch-based lowering).
  if (!cond->IsConstant()) {
    switch (cond_type) {
      default:
        // Integer-typed condition.
        switch (dst_type) {
          default:
            // Moving int on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg, false_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg, true_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                // selnez AT, true_reg, cond_reg
                // seleqz TMP, false_reg, cond_reg
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movn out_reg, true_reg/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg_lo, false_reg_lo, cond_reg
                // seleqz out_reg_hi, false_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg_lo, true_reg_lo, cond_reg
                // selnez out_reg_hi, true_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
              // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on int condition.
            if (is_r6) {
              if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                can_move_conditionally = true;
                if (is_true_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // seleqz.fmt out_reg, false_reg, temp_cond_reg
                  use_const_for_true_in = true;
                } else if (is_false_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // selnez.fmt out_reg, true_reg, temp_cond_reg
                  use_const_for_false_in = true;
                } else {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // sel.fmt temp_cond_reg, false_reg, true_reg
                  // mov.fmt out_reg, temp_cond_reg
                }
              }
            } else {
              // movn.fmt out_reg, true_reg, cond_reg
              can_move_conditionally = true;
            }
            break;
        }
        break;
      case Primitive::kPrimLong:
        // We don't materialize long comparison now
        // and use conditional branches instead.
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        // Floating-point condition: already materialized into an FPU flag
        // (R2) or FPU mask register (R6).
        switch (dst_type) {
          default:
            // Moving int on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg, false_reg, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg, true_reg, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else {
                // mfc1 TMP, temp_cond_reg
                // selnez AT, true_reg, TMP
                // seleqz TMP, false_reg, TMP
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movt out_reg, true_reg/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg_lo, false_reg_lo, TMP
                // seleqz out_reg_hi, false_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg_lo, true_reg_lo, TMP
                // selnez out_reg_hi, true_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movt out_reg_lo, true_reg_lo/ZERO, cc
              // movt out_reg_hi, true_reg_hi/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on float/double condition.
            if (is_r6) {
              can_move_conditionally = true;
              if (is_true_value_zero_constant) {
                // seleqz.fmt out_reg, false_reg, temp_cond_reg
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez.fmt out_reg, true_reg, temp_cond_reg
                use_const_for_false_in = true;
              } else {
                // sel.fmt temp_cond_reg, false_reg, true_reg
                // mov.fmt out_reg, temp_cond_reg
              }
            } else {
              // movt.fmt out_reg, true_reg, cc
              can_move_conditionally = true;
            }
            break;
        }
        break;
    }
  }

  // At most one input may be folded into a zero constant, and only when a
  // conditional move was accepted.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    // Input 0 is the false value, input 1 the true value; a zero-constant
    // input stays a constant location so ZERO can be substituted at emit time.
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }
    // On R6 we don't require the output to be the same as the
    // first input for conditional moves unlike on R2.
    bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
    if (is_out_same_as_first_in) {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    } else {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    }
  }

  return can_move_conditionally;
}
5739
// Emits an HSelect as a pre-R6 conditional move (MOVN/MOVZ on an integer
// condition register, MOVT/MOVF on FP condition-code flag 0). The output is
// pre-loaded with the false value (SameAsFirstInput, enforced by
// CanMoveConditionally), so only the true value is conditionally moved in.
// A zero-constant true value is moved from the ZERO register.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location src = locations->InAt(1);
  Register src_reg = ZERO;
  Register src_reg_high = ZERO;
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  int cond_cc = 0;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Condition already lives in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the condition now; the Materialize* helpers may emit the
    // inverse compare and report that via `cond_inverted`.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR2(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               cond_cc);
        break;
    }
  }

  DCHECK(dst.Equals(locations->InAt(0)));
  if (src.IsRegister()) {
    src_reg = src.AsRegister<Register>();
  } else if (src.IsRegisterPair()) {
    src_reg = src.AsRegisterPairLow<Register>();
    src_reg_high = src.AsRegisterPairHigh<Register>();
  } else if (src.IsConstant()) {
    // Zero constant: src_reg/src_reg_high stay ZERO.
    DCHECK(src.GetConstant()->IsZeroBitPattern());
  }

  switch (cond_type) {
    default:
      // Integer condition: use MOVN (or MOVZ when the compare was inverted).
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
          } else {
            __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
          }
          break;
        case Primitive::kPrimLong:
          // Long values move as a lo/hi register pair.
          if (cond_inverted) {
            __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          } else {
            __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
      }
      break;
    case Primitive::kPrimLong:
      // Long conditions are rejected by CanMoveConditionally.
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // FP condition: use MOVT (or MOVF when inverted) on condition code 0.
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
          } else {
            __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          } else {
            __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
      }
      break;
  }
}
5861
// Emits an HSelect as an R6 conditional move (SELEQZ/SELNEZ for integer
// destinations, SEL.fmt/SELEQZ.fmt/SELNEZ.fmt for FP destinations). Unlike
// the R2 variant, the output need not alias an input; a zero-constant input
// is realized by selecting only the other input (the unselected lanes become
// zero). CanMoveConditionally guarantees only the combinations handled below
// reach this function.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  FRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Condition already lives in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the condition now; the helper may emit the inverse compare
    // and report that via `cond_inverted`.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR6(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               fcond_reg);
        break;
    }
  }

  // Constant inputs must be zero bit patterns (guaranteed by
  // CanMoveConditionally); they are realized by omitting the select of that
  // side rather than loading a value.
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer destination. An FP condition mask is first moved to a core
      // register.
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        // True value is zero: select the false value only when cond is false.
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False value is zero: select the true value only when cond is true.
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        }
      } else {
        // General case: merge the two one-sided selects with an OR.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
        }
        __ Or(dst.AsRegister<Register>(), AT, TMP);
      }
      break;
    case Primitive::kPrimLong: {
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      // Long values select as a lo/hi register pair; CanMoveConditionally
      // only admits longs when one side is a zero constant.
      Register dst_lo = dst.AsRegisterPairLow<Register>();
      Register dst_hi = dst.AsRegisterPairHigh<Register>();
      if (true_src.IsConstant()) {
        Register src_lo = false_src.AsRegisterPairLow<Register>();
        Register src_hi = false_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        } else {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        }
      } else {
        DCHECK(false_src.IsConstant());
        Register src_lo = true_src.AsRegisterPairLow<Register>();
        Register src_hi = true_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        } else {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // SEL.S writes into the condition register, so select there first and
        // then copy to the destination.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
6035
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006036void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6037 LocationSummary* locations = new (GetGraph()->GetArena())
6038 LocationSummary(flag, LocationSummary::kNoCall);
6039 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07006040}
6041
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006042void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6043 __ LoadFromOffset(kLoadWord,
6044 flag->GetLocations()->Out().AsRegister<Register>(),
6045 SP,
6046 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07006047}
6048
David Brazdil74eb1b22015-12-14 11:44:01 +00006049void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
6050 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006051 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00006052}
6053
6054void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006055 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
6056 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
6057 if (is_r6) {
6058 GenConditionalMoveR6(select);
6059 } else {
6060 GenConditionalMoveR2(select);
6061 }
6062 } else {
6063 LocationSummary* locations = select->GetLocations();
6064 MipsLabel false_target;
6065 GenerateTestAndBranch(select,
6066 /* condition_input_index */ 2,
6067 /* true_target */ nullptr,
6068 &false_target);
6069 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
6070 __ Bind(&false_target);
6071 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006072}
6073
// A native-debug-info marker has no inputs or outputs; an empty location
// summary is still allocated so the instruction participates in codegen.
void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}
6077
void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
6081
6082void CodeGeneratorMIPS::GenerateNop() {
6083 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00006084}
6085
// Sets up locations for an instance/static field get. Three regimes:
//  - volatile 64-bit fields (long/double) go through a runtime call, since
//    MIPS32 has no atomic 64-bit load here, so argument/return registers are
//    reserved;
//  - object fields with read barriers enabled may take a slow path;
//  - everything else is a plain in-register load.
void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      generate_volatile
          ? LocationSummary::kCallOnMainOnly
          : (object_field_get_with_read_barrier
              ? LocationSummary::kCallOnSlowPath
              : LocationSummary::kNoCall));

  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0 is the object (or class) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetOut(Location::Any());
      // Need some temp core regs since FP results are returned in core registers
      Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
    }
  } else {
    if (Primitive::IsFloatingPointType(instruction->GetType())) {
      locations->SetOut(Location::RequiresFpuRegister());
    } else {
      // The output overlaps in the case of an object field get with
      // read barriers enabled: we do not want the move to overwrite the
      // object's location, as we need it to emit the read barrier.
      locations->SetOut(Location::RequiresRegister(),
                        object_field_get_with_read_barrier
                            ? Location::kOutputOverlap
                            : Location::kNoOutputOverlap);
    }
    if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
      // We need a temporary register for the read barrier marking slow
      // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
      if (!kBakerReadBarrierThunksEnableForFields) {
        locations->AddTemp(Location::RequiresRegister());
      }
    }
  }
}
6139
// Emits the code for an instance/static field get. Wide (64-bit) volatile
// loads are routed through the kQuickA64Load runtime entrypoint; all other
// loads are emitted inline, with a read barrier for reference fields when
// read barriers are enabled.
void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness matching the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile && load_type == kLoadDoubleword) {
    // 64-bit volatile load: call the kQuickA64Load entrypoint. The temp
    // (A0 per the locations set up in LocationsBuilderMIPS::HandleFieldGet)
    // holds the absolute field address.
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    // NOTE(review): RecordPcInfo above uses instruction->GetDexPc() while
    // InvokeRuntime uses the `dex_pc` parameter — confirm both are meant to
    // carry the same value.
    codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
    if (type == Primitive::kPrimDouble) {
      // FP results are returned in core registers. Need to move them.
      if (dst_loc.IsFpuRegister()) {
        __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
        __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                         dst_loc.AsFpuRegister<FRegister>());
      } else {
        DCHECK(dst_loc.IsDoubleStackSlot());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(1).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(2).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex() + 4);
      }
    }
  } else {
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With thunks enabled no temp register is reserved; pass NoLocation.
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else if (!Primitive::IsFloatingPointType(type)) {
      // Core-register load; longs occupy a register pair (low half chosen,
      // LoadFromOffset with kLoadDoubleword handles the pair).
      Register dst;
      if (type == Primitive::kPrimLong) {
        DCHECK(dst_loc.IsRegisterPair());
        dst = dst_loc.AsRegisterPairLow<Register>();
      } else {
        DCHECK(dst_loc.IsRegister());
        dst = dst_loc.AsRegister<Register>();
      }
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    } else {
      // FPU-register load (float or double).
      DCHECK(dst_loc.IsFpuRegister());
      FRegister dst = dst_loc.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ LoadSFromOffset(dst, obj, offset, null_checker);
      } else {
        __ LoadDFromOffset(dst, obj, offset, null_checker);
      }
    }
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
6260
6261void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
6262 Primitive::Type field_type = field_info.GetFieldType();
6263 bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
6264 bool generate_volatile = field_info.IsVolatile() && is_wide;
6265 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006266 instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006267
6268 locations->SetInAt(0, Location::RequiresRegister());
6269 if (generate_volatile) {
6270 InvokeRuntimeCallingConvention calling_convention;
6271 // need A0 to hold base + offset
6272 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6273 if (field_type == Primitive::kPrimLong) {
6274 locations->SetInAt(1, Location::RegisterPairLocation(
6275 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
6276 } else {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006277 // Use Location::Any() to prevent situations when running out of available fp registers.
6278 locations->SetInAt(1, Location::Any());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006279 // Pass FP parameters in core registers.
6280 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
6281 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
6282 }
6283 } else {
6284 if (Primitive::IsFloatingPointType(field_type)) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006285 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006286 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006287 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006288 }
6289 }
6290}
6291
// Emits the code for an instance/static field set. Wide (64-bit) volatile
// stores are routed through the kQuickA64Store runtime entrypoint; all other
// stores are emitted inline. A GC card is marked when storing a reference
// that needs a write barrier.
void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc,
                                                  bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width matching the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Volatile stores require an any-store barrier before the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (is_volatile && store_type == kStoreDoubleword) {
    // 64-bit volatile store: call the kQuickA64Store entrypoint. The temp
    // (A0 per the locations set up in LocationsBuilderMIPS::HandleFieldSet)
    // holds the absolute field address; the value goes in A2/A3.
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check.
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    if (type == Primitive::kPrimDouble) {
      // Pass FP parameters in core registers.
      if (value_location.IsFpuRegister()) {
        __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
                value_location.AsFpuRegister<FRegister>());
        __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                           value_location.AsFpuRegister<FRegister>());
      } else if (value_location.IsDoubleStackSlot()) {
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(1).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex());
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(2).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex() + 4);
      } else {
        DCHECK(value_location.IsConstant());
        DCHECK(value_location.GetConstant()->IsDoubleConstant());
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
                       locations->GetTemp(1).AsRegister<Register>(),
                       value);
      }
    }
    codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
  } else {
    if (value_location.IsConstant()) {
      // Constant values (including null references) are materialized by
      // StoreConstToOffset via TMP.
      int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
      __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register src;
      if (type == Primitive::kPrimLong) {
        src = value_location.AsRegisterPairLow<Register>();
      } else {
        src = value_location.AsRegister<Register>();
      }
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      // FPU-register store (float or double).
      FRegister src = value_location.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ StoreSToOffset(src, obj, offset, null_checker);
      } else {
        __ StoreDToOffset(src, obj, offset, null_checker);
      }
    }
  }

  // Mark the GC card for a stored reference. StoreNeedsWriteBarrier is false
  // for null constants, so value_location is a register here.
  if (needs_write_barrier) {
    Register src = value_location.AsRegister<Register>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Volatile stores also require an any-any barrier after the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
6406
void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegate to the shared field-get location setup.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6410
void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegate to the shared field-get code emission.
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
6414
void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegate to the shared field-set location setup.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
6418
void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegate to the shared field-set code emission.
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
6425
Alexey Frunze15958152017-02-09 19:08:30 -08006426void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
6427 HInstruction* instruction,
6428 Location out,
6429 uint32_t offset,
6430 Location maybe_temp,
6431 ReadBarrierOption read_barrier_option) {
6432 Register out_reg = out.AsRegister<Register>();
6433 if (read_barrier_option == kWithReadBarrier) {
6434 CHECK(kEmitCompilerReadBarrier);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006435 if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
6436 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6437 }
Alexey Frunze15958152017-02-09 19:08:30 -08006438 if (kUseBakerReadBarrier) {
6439 // Load with fast path based Baker's read barrier.
6440 // /* HeapReference<Object> */ out = *(out + offset)
6441 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6442 out,
6443 out_reg,
6444 offset,
6445 maybe_temp,
6446 /* needs_null_check */ false);
6447 } else {
6448 // Load with slow path based read barrier.
6449 // Save the value of `out` into `maybe_temp` before overwriting it
6450 // in the following move operation, as we will need it for the
6451 // read barrier below.
6452 __ Move(maybe_temp.AsRegister<Register>(), out_reg);
6453 // /* HeapReference<Object> */ out = *(out + offset)
6454 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6455 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6456 }
6457 } else {
6458 // Plain load with no read barrier.
6459 // /* HeapReference<Object> */ out = *(out + offset)
6460 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6461 __ MaybeUnpoisonHeapReference(out_reg);
6462 }
6463}
6464
6465void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
6466 HInstruction* instruction,
6467 Location out,
6468 Location obj,
6469 uint32_t offset,
6470 Location maybe_temp,
6471 ReadBarrierOption read_barrier_option) {
6472 Register out_reg = out.AsRegister<Register>();
6473 Register obj_reg = obj.AsRegister<Register>();
6474 if (read_barrier_option == kWithReadBarrier) {
6475 CHECK(kEmitCompilerReadBarrier);
6476 if (kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006477 if (!kBakerReadBarrierThunksEnableForFields) {
6478 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6479 }
Alexey Frunze15958152017-02-09 19:08:30 -08006480 // Load with fast path based Baker's read barrier.
6481 // /* HeapReference<Object> */ out = *(obj + offset)
6482 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6483 out,
6484 obj_reg,
6485 offset,
6486 maybe_temp,
6487 /* needs_null_check */ false);
6488 } else {
6489 // Load with slow path based read barrier.
6490 // /* HeapReference<Object> */ out = *(obj + offset)
6491 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6492 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6493 }
6494 } else {
6495 // Plain load with no read barrier.
6496 // /* HeapReference<Object> */ out = *(obj + offset)
6497 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6498 __ MaybeUnpoisonHeapReference(out_reg);
6499 }
6500}
6501
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006502static inline int GetBakerMarkThunkNumber(Register reg) {
6503 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 21, "Expecting equal");
6504 if (reg >= V0 && reg <= T7) { // 14 consequtive regs.
6505 return reg - V0;
6506 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
6507 return 14 + (reg - S2);
6508 } else if (reg == FP) { // One more.
6509 return 20;
6510 }
6511 LOG(FATAL) << "Unexpected register " << reg;
6512 UNREACHABLE();
6513}
6514
6515static inline int GetBakerMarkFieldArrayThunkDisplacement(Register reg, bool short_offset) {
6516 int num = GetBakerMarkThunkNumber(reg) +
6517 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
6518 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
6519}
6520
6521static inline int GetBakerMarkGcRootThunkDisplacement(Register reg) {
6522 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
6523 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
6524}
6525
Alexey Frunze15958152017-02-09 19:08:30 -08006526void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
6527 Location root,
6528 Register obj,
6529 uint32_t offset,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006530 ReadBarrierOption read_barrier_option,
6531 MipsLabel* label_low) {
6532 bool reordering;
6533 if (label_low != nullptr) {
6534 DCHECK_EQ(offset, 0x5678u);
6535 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07006536 Register root_reg = root.AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08006537 if (read_barrier_option == kWithReadBarrier) {
6538 DCHECK(kEmitCompilerReadBarrier);
6539 if (kUseBakerReadBarrier) {
6540 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6541 // Baker's read barrier are used:
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006542 if (kBakerReadBarrierThunksEnableForGcRoots) {
6543 // Note that we do not actually check the value of `GetIsGcMarking()`
6544 // to decide whether to mark the loaded GC root or not. Instead, we
6545 // load into `temp` (T9) the read barrier mark introspection entrypoint.
6546 // If `temp` is null, it means that `GetIsGcMarking()` is false, and
6547 // vice versa.
6548 //
6549 // We use thunks for the slow path. That thunk checks the reference
6550 // and jumps to the entrypoint if needed.
6551 //
6552 // temp = Thread::Current()->pReadBarrierMarkReg00
6553 // // AKA &art_quick_read_barrier_mark_introspection.
6554 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
6555 // if (temp != nullptr) {
6556 // temp = &gc_root_thunk<root_reg>
6557 // root = temp(root)
6558 // }
Alexey Frunze15958152017-02-09 19:08:30 -08006559
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006560 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
6561 const int32_t entry_point_offset =
6562 Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
6563 const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
6564 int16_t offset_low = Low16Bits(offset);
6565 int16_t offset_high = High16Bits(offset - offset_low); // Accounts for sign
6566 // extension in lw.
6567 bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
6568 Register base = short_offset ? obj : TMP;
6569 // Loading the entrypoint does not require a load acquire since it is only changed when
6570 // threads are suspended or running a checkpoint.
6571 __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
6572 reordering = __ SetReorder(false);
6573 if (!short_offset) {
6574 DCHECK(!label_low);
6575 __ AddUpper(base, obj, offset_high);
6576 }
Alexey Frunze0cab6562017-07-25 15:19:36 -07006577 MipsLabel skip_call;
6578 __ Beqz(T9, &skip_call, /* is_bare */ true);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006579 if (label_low != nullptr) {
6580 DCHECK(short_offset);
6581 __ Bind(label_low);
6582 }
6583 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6584 __ LoadFromOffset(kLoadWord, root_reg, base, offset_low); // Single instruction
6585 // in delay slot.
6586 if (isR6) {
6587 __ Jialc(T9, thunk_disp);
6588 } else {
6589 __ Addiu(T9, T9, thunk_disp);
6590 __ Jalr(T9);
6591 __ Nop();
6592 }
Alexey Frunze0cab6562017-07-25 15:19:36 -07006593 __ Bind(&skip_call);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006594 __ SetReorder(reordering);
6595 } else {
6596 // Note that we do not actually check the value of `GetIsGcMarking()`
6597 // to decide whether to mark the loaded GC root or not. Instead, we
6598 // load into `temp` (T9) the read barrier mark entry point corresponding
6599 // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
6600 // is false, and vice versa.
6601 //
6602 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
6603 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6604 // if (temp != null) {
6605 // root = temp(root)
6606 // }
Alexey Frunze15958152017-02-09 19:08:30 -08006607
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006608 if (label_low != nullptr) {
6609 reordering = __ SetReorder(false);
6610 __ Bind(label_low);
6611 }
6612 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6613 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6614 if (label_low != nullptr) {
6615 __ SetReorder(reordering);
6616 }
6617 static_assert(
6618 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6619 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6620 "have different sizes.");
6621 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6622 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6623 "have different sizes.");
Alexey Frunze15958152017-02-09 19:08:30 -08006624
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006625 // Slow path marking the GC root `root`.
6626 Location temp = Location::RegisterLocation(T9);
6627 SlowPathCodeMIPS* slow_path =
6628 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
6629 instruction,
6630 root,
6631 /*entrypoint*/ temp);
6632 codegen_->AddSlowPath(slow_path);
6633
6634 const int32_t entry_point_offset =
6635 Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
6636 // Loading the entrypoint does not require a load acquire since it is only changed when
6637 // threads are suspended or running a checkpoint.
6638 __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
6639 __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
6640 __ Bind(slow_path->GetExitLabel());
6641 }
Alexey Frunze15958152017-02-09 19:08:30 -08006642 } else {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006643 if (label_low != nullptr) {
6644 reordering = __ SetReorder(false);
6645 __ Bind(label_low);
6646 }
Alexey Frunze15958152017-02-09 19:08:30 -08006647 // GC root loaded through a slow path for read barriers other
6648 // than Baker's.
6649 // /* GcRoot<mirror::Object>* */ root = obj + offset
6650 __ Addiu32(root_reg, obj, offset);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006651 if (label_low != nullptr) {
6652 __ SetReorder(reordering);
6653 }
Alexey Frunze15958152017-02-09 19:08:30 -08006654 // /* mirror::Object* */ root = root->Read()
6655 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6656 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07006657 } else {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006658 if (label_low != nullptr) {
6659 reordering = __ SetReorder(false);
6660 __ Bind(label_low);
6661 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07006662 // Plain GC root load with no read barrier.
6663 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6664 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6665 // Note that GC roots are not affected by heap poisoning, thus we
6666 // do not have to unpoison `root_reg` here.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006667 if (label_low != nullptr) {
6668 __ SetReorder(reordering);
6669 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07006670 }
6671}
6672
// Loads the heap reference at `obj + offset` into `ref` with a Baker read
// barrier. With field thunks enabled this emits the thunk-based fast path
// inline (and `temp` must be invalid); otherwise it defers to
// GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t offset,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    // // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // If the offset is too large to fit into the lw instruction, we
    //   // use an adjusted base register (TMP) here. This register
    //   // receives bits 16 ... 31 of the offset before the thunk invocation
    //   // and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    // gray_return_address:

    DCHECK(temp.IsInvalid());
    bool isR6 = GetInstructionSetFeatures().IsR6();
    int16_t offset_low = Low16Bits(offset);
    int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lw.
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    // Disable assembler reordering: the delay/forbidden slots below are
    // scheduled by hand.
    bool reordering = __ SetReorder(false);
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    Register base = short_offset ? obj : TMP;
    MipsLabel skip_call;
    // Four emission variants: {short, long} offset x {R6, R2}. Bare branches
    // are used so the manually filled delay/forbidden slots stay in place.
    if (short_offset) {
      if (isR6) {
        __ Beqzc(T9, &skip_call, /* is_bare */ true);
        __ Nop();  // In forbidden slot.
        __ Jialc(T9, thunk_disp);
      } else {
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Nop();  // In delay slot.
      }
      __ Bind(&skip_call);
    } else {
      // Long offset: materialize the high half of the offset in `base`
      // (TMP) whether or not the thunk is invoked.
      if (isR6) {
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Aui(base, obj, offset_high);  // In delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        __ Lui(base, offset_high);
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Bind(&skip_call);
        __ Addu(base, base, obj);  // In delay slot.
      }
    }
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, base, offset_low);  // Single instruction.
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // Thunks disabled: go through the generic Baker read barrier emission.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
6780
// Emits a Baker read barrier protected reference load from an array element:
// ref = data[index], where the element array starts at obj + data_offset.
// When array thunks are enabled, the gray check is done out-of-line by a
// per-holder-register thunk; otherwise this falls through to the generic
// inline fast path in GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t data_offset,
                                                              Location index,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // Array elements are 32-bit heap references, hence a fixed scale of 4.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    // Branches and the thunk call must stay exactly where scheduled; disable
    // assembler reordering for this whole sequence.
    bool reordering = __ SetReorder(false);
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    // For "long offset" intrinsics the index is a register pair of which only
    // the low half carries data.
    Register index_reg = index.IsRegisterPair()
        ? index.AsRegisterPairLow<Register>()
        : index.AsRegister<Register>();
    MipsLabel skip_call;
    if (GetInstructionSetFeatures().IsR6()) {
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Lsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
    } else {
      // Pre-R6: compute the scaled index first, then branch/call. Note that
      // `Addu` below is both the delay slot of `Jalr` and the target of the
      // skip branch, so TMP = (index << scale) + obj is computed on both paths.
      __ Sll(TMP, index_reg, scale_factor);
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Addiu(T9, T9, thunk_disp);  // In delay slot.
      __ Jalr(T9);
      __ Bind(&skip_call);
      __ Addu(TMP, TMP, obj);  // In delay slot.
    }
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // Thunks disabled: use the generic inline Baker fast path.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
6869
// Generic inline Baker read barrier fast path: loads obj's lock word, performs
// the reference load, then branches to a marking slow path if the holder is
// gray. The load of the lock word MUST precede the reference load (load-load
// ordering enforced by Sync(0) below).
//
// If `always_update_field` is true, the slow path also re-writes the field at
// `obj + field_offset` (used by the UnsafeCASObject intrinsic); in that mode
// `offset` must be 0, `scale_factor` TIMES_1, and `index` holds the field
// offset register pair.
void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                  Location ref,
                                                                  Register obj,
                                                                  uint32_t offset,
                                                                  Location index,
                                                                  ScaleFactor scale_factor,
                                                                  Location temp,
                                                                  bool needs_null_check,
                                                                  bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Register ref_reg = ref.AsRegister<Register>();
  Register temp_reg = temp.AsRegister<Register>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load above is the faulting instruction for a null `obj`.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
    } else {
      // Handle the special case of the
      // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
      // intrinsics, which use a register pair as index ("long
      // offset"), of which only the low part contains data.
      Register index_reg = index.IsRegisterPair()
          ? index.AsRegisterPairLow<Register>()
          : index.AsRegister<Register>();
      __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
      __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register pair (of which only the lower half
    // is used). Thus `offset` and `scale_factor` above are expected
    // to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
                                                  ref,
                                                  obj,
                                                  /* field_offset */ index,
                                                  temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltz(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6978
6979void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
6980 Location out,
6981 Location ref,
6982 Location obj,
6983 uint32_t offset,
6984 Location index) {
6985 DCHECK(kEmitCompilerReadBarrier);
6986
6987 // Insert a slow path based read barrier *after* the reference load.
6988 //
6989 // If heap poisoning is enabled, the unpoisoning of the loaded
6990 // reference will be carried out by the runtime within the slow
6991 // path.
6992 //
6993 // Note that `ref` currently does not get unpoisoned (when heap
6994 // poisoning is enabled), which is alright as the `ref` argument is
6995 // not used by the artReadBarrierSlow entry point.
6996 //
6997 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6998 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
6999 ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
7000 AddSlowPath(slow_path);
7001
7002 __ B(slow_path->GetEntryLabel());
7003 __ Bind(slow_path->GetExitLabel());
7004}
7005
7006void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7007 Location out,
7008 Location ref,
7009 Location obj,
7010 uint32_t offset,
7011 Location index) {
7012 if (kEmitCompilerReadBarrier) {
7013 // Baker's read barriers shall be handled by the fast path
7014 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
7015 DCHECK(!kUseBakerReadBarrier);
7016 // If heap poisoning is enabled, unpoisoning will be taken care of
7017 // by the runtime within the slow path.
7018 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
7019 } else if (kPoisonHeapReferences) {
7020 __ UnpoisonHeapReference(out.AsRegister<Register>());
7021 }
7022}
7023
7024void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7025 Location out,
7026 Location root) {
7027 DCHECK(kEmitCompilerReadBarrier);
7028
7029 // Insert a slow path based read barrier *after* the GC root load.
7030 //
7031 // Note that GC roots are not affected by heap poisoning, so we do
7032 // not need to do anything special for this here.
7033 SlowPathCodeMIPS* slow_path =
7034 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
7035 AddSlowPath(slow_path);
7036
7037 __ B(slow_path->GetEntryLabel());
7038 __ Bind(slow_path->GetExitLabel());
7039}
7040
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007041void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007042 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
7043 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07007044 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007045 switch (type_check_kind) {
7046 case TypeCheckKind::kExactCheck:
7047 case TypeCheckKind::kAbstractClassCheck:
7048 case TypeCheckKind::kClassHierarchyCheck:
7049 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08007050 call_kind =
7051 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007052 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08007053 break;
7054 case TypeCheckKind::kArrayCheck:
7055 case TypeCheckKind::kUnresolvedCheck:
7056 case TypeCheckKind::kInterfaceCheck:
7057 call_kind = LocationSummary::kCallOnSlowPath;
7058 break;
7059 }
7060
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007061 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007062 if (baker_read_barrier_slow_path) {
7063 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
7064 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007065 locations->SetInAt(0, Location::RequiresRegister());
7066 locations->SetInAt(1, Location::RequiresRegister());
7067 // The output does overlap inputs.
7068 // Note that TypeCheckSlowPathMIPS uses this register too.
7069 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08007070 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007071}
7072
// Emits code for an HInstanceOf: sets `out` to 1 if `obj` is an instance of
// `cls`, 0 otherwise. Simple checks are inlined (class-pointer comparisons,
// super-class / component-type walks); unresolved/interface checks always
// defer to TypeCheckSlowPathMIPS.
void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  MipsLabel done;
  SlowPathCodeMIPS* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) < 1, i.e. 1 iff out == cls.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      MipsLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      // (`out` is 0 here, so it already holds the "false" result.)
      __ Beqz(out, &done);
      __ Bne(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      MipsLabel loop, success;
      __ Bind(&loop);
      __ Beq(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnez(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      MipsLabel success;
      __ Beq(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      // out = 1 iff the component type is a reference type (kPrimNot == 0).
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Sltiu(out, out, 1);
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                    /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bne(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                    /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
7246
7247void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
7248 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7249 locations->SetOut(Location::ConstantLocation(constant));
7250}
7251
7252void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
7253 // Will be generated at use site.
7254}
7255
7256void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
7257 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7258 locations->SetOut(Location::ConstantLocation(constant));
7259}
7260
7261void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
7262 // Will be generated at use site.
7263}
7264
7265void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
7266 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
7267 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
7268}
7269
7270void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
7271 HandleInvoke(invoke);
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007272 // The register T7 is required to be used for the hidden argument in
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007273 // art_quick_imt_conflict_trampoline, so add the hidden argument.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007274 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007275}
7276
// Emits an interface call: loads the receiver's class, indexes into the IMT
// to find the target ArtMethod, sets the hidden argument (dex method index),
// and jumps through T9 to the method's quick entrypoint.
void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // Set the hidden argument.
  // Temp 1 is the register reserved for the hidden argument (T7, see the
  // locations builder above); the conflict trampoline reads it from there.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above is the faulting instruction for a null receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetImtPtr();
  __ LoadFromOffset(kLoadWord, temp, temp,
      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMipsPointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
7318
7319void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07007320 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7321 if (intrinsic.TryDispatch(invoke)) {
7322 return;
7323 }
7324
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007325 HandleInvoke(invoke);
7326}
7327
// Builds locations for static/direct invokes. On pre-R6, PC-relative method
// load kinds need an extra input (the PC-relative base register); it is added
// both on the intrinsic path (as Any, only if the intrinsic may still call)
// and on the generic path (as a required register).
void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  // R6 has PC-relative addressing (no extra base register needed).
  bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
  bool has_extra_input = invoke->HasPcRelativeMethodLoadKind() && !is_r6;

  IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    if (invoke->GetLocations()->CanCall() && has_extra_input) {
      invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
    }
    return;
  }

  HandleInvoke(invoke);

  // Add the extra input register if either the dex cache array base register
  // or the PC-relative base register for accessing literals is needed.
  if (has_extra_input) {
    invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
  }
}
7352
Orion Hodsonac141392017-01-13 11:53:47 +00007353void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
7354 HandleInvoke(invoke);
7355}
7356
7357void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
7358 codegen_->GenerateInvokePolymorphicCall(invoke);
7359}
7360
Chris Larsen701566a2015-10-27 15:29:13 -07007361static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007362 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07007363 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
7364 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007365 return true;
7366 }
7367 return false;
7368}
7369
// Returns the string load kind actually supported by this code generator,
// downgrading PC-relative kinds to kRuntimeCall on pre-R6 when the graph has
// irreducible loops (the PC-relative base register optimization cannot handle
// them).
HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
  // is incompatible with it.
  // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
  // with irreducible loops.
  bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  bool fallback_load = has_irreducible_loops && !is_r6;
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageInternTable:
    case HLoadString::LoadKind::kBssEntry:
      // AOT-only kinds; they may still be downgraded by `fallback_load`.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      // Absolute address: subject to the pre-R6 irreducible-loop fallback.
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      // JIT loads do not use the PC-relative base register; never fall back.
      DCHECK(Runtime::Current()->UseJitCompilation());
      fallback_load = false;
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      // Already the fallback kind; nothing to downgrade.
      fallback_load = false;
      break;
  }
  if (fallback_load) {
    desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
  }
  return desired_string_load_kind;
}
7400
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007401HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
7402 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007403 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007404 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007405 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7406 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007407 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007408 bool is_r6 = GetInstructionSetFeatures().IsR6();
7409 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007410 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007411 case HLoadClass::LoadKind::kInvalid:
7412 LOG(FATAL) << "UNREACHABLE";
7413 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007414 case HLoadClass::LoadKind::kReferrersClass:
7415 fallback_load = false;
7416 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007417 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko94ec2db2017-09-06 17:21:03 +01007418 case HLoadClass::LoadKind::kBootImageClassTable:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007419 case HLoadClass::LoadKind::kBssEntry:
7420 DCHECK(!Runtime::Current()->UseJitCompilation());
7421 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007422 case HLoadClass::LoadKind::kBootImageAddress:
7423 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007424 case HLoadClass::LoadKind::kJitTableAddress:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007425 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007426 fallback_load = false;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007427 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007428 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007429 fallback_load = false;
7430 break;
7431 }
7432 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007433 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007434 }
7435 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007436}
7437
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007438Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
7439 Register temp) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007440 CHECK(!GetInstructionSetFeatures().IsR6());
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007441 CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
7442 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
7443 if (!invoke->GetLocations()->Intrinsified()) {
7444 return location.AsRegister<Register>();
7445 }
7446 // For intrinsics we allow any location, so it may be on the stack.
7447 if (!location.IsRegister()) {
7448 __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
7449 return temp;
7450 }
7451 // For register locations, check if the register was saved. If so, get it from the stack.
7452 // Note: There is a chance that the register was saved but not overwritten, so we could
7453 // save one load. However, since this is just an intrinsic slow path we prefer this
7454 // simple and more robust approach rather that trying to determine if that's the case.
7455 SlowPathCode* slow_path = GetCurrentSlowPath();
7456 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
7457 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
7458 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
7459 __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
7460 return temp;
7461 }
7462 return location.AsRegister<Register>();
7463}
7464
Vladimir Markodc151b22015-10-15 18:02:30 +01007465HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
7466 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01007467 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007468 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007469 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007470 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007471 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7472 // with irreducible loops.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007473 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007474 bool is_r6 = GetInstructionSetFeatures().IsR6();
7475 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007476 switch (dispatch_info.method_load_kind) {
Vladimir Marko65979462017-05-19 17:25:12 +01007477 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007478 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007479 break;
Vladimir Markodc151b22015-10-15 18:02:30 +01007480 default:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007481 fallback_load = false;
Vladimir Markodc151b22015-10-15 18:02:30 +01007482 break;
7483 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007484 if (fallback_load) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007485 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007486 dispatch_info.method_load_data = 0;
7487 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007488 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01007489}
7490
// Emits a static or direct call: first materializes the callee (an ArtMethod*
// or, for kStringInit, an entrypoint) according to the method load kind, then
// performs the call according to the code pointer location, and finally
// records the PC for stack maps. For kRuntimeCall the runtime performs the
// call itself and we return early.
void CodeGeneratorMIPS::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  // On pre-R6, PC-relative kinds need the extra base-address input; on R6
  // (or for non-PC-relative kinds) no base register is required.
  Register base_reg = (invoke->HasPcRelativeMethodLoadKind() && !is_r6)
      ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
      : ZERO;

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadWord,
                        temp.AsRegister<Register>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling ourselves: the current method is already available as the
      // invoke's special input; no load needed.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Emit a high/low patch pair resolved at link time; the pair is tied
      // together via info_high so the linker patches both halves consistently.
      PcRelativePatchInfo* info_high = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      Register temp_reg = temp.AsRegister<Register>();
      EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg);
      __ Addiu(temp_reg, TMP, /* placeholder */ 0x5678, &info_low->label);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it as a constant.
      __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry filled in by the runtime on
      // first resolution; again a linked high/low patch pair.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      Register temp_reg = temp.AsRegister<Register>();
      EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg);
      __ Lw(temp_reg, TMP, /* placeholder */ 0x5678, &info_low->label);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: branch-and-link straight to our own frame entry.
      __ Bal(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadWord,
                        T9,
                        callee_method.AsRegister<Register>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMipsPointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ NopIfNoReordering();
      break;
  }
  // Record the PC of the call for stack maps / deopt; must follow the call.
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
7565
7566void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007567 // Explicit clinit checks triggered by static invokes must have been pruned by
7568 // art::PrepareForRegisterAllocation.
7569 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007570
7571 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7572 return;
7573 }
7574
7575 LocationSummary* locations = invoke->GetLocations();
7576 codegen_->GenerateStaticOrDirectCall(invoke,
7577 locations->HasTemps()
7578 ? locations->GetTemp(0)
7579 : Location::NoLocation());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007580}
7581
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod from the embedded vtable, and jumps through its quick-code entry
// point. The implicit null check is recorded immediately after the class
// load, which is the instruction that dereferences the receiver.
void CodeGeneratorMIPS::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  Register temp = temp_location.AsRegister<Register>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  // The load above is the receiver dereference; record it as the implicit
  // null check point.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  // Record the PC of the call for stack maps / deopt; must follow the call.
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
7617
7618void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7619 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7620 return;
7621 }
7622
7623 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007624 DCHECK(!codegen_->IsLeafMethod());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007625}
7626
// Allocates the locations for HLoadClass. kRuntimeCall uses the shared
// runtime-call summary; all other kinds produce their result in a register
// and, on R2, consume an extra register input for the PC-relative base
// (presumably produced by HMipsComputeBaseMethodAddress — see the TODOs in
// GetSupportedLoadClassKind) or, for kReferrersClass, the current method.
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  // Classes already in the boot image need no read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBootImageClassTable:
    case HLoadClass::LoadKind::kBssEntry:
      if (isR6) {
        // R6 has PC-relative addressing; no base input needed.
        break;
      }
      FALLTHROUGH_INTENDED;
    case HLoadClass::LoadKind::kReferrersClass:
      // kReferrersClass reads the current method; on R2 the kinds above reuse
      // the same input slot for the PC-relative base.
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
7676
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007677// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7678// move.
7679void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00007680 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007681 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00007682 codegen_->GenerateLoadClassRuntimeCall(cls);
Pavle Batutae87a7182015-10-28 13:10:42 +01007683 return;
7684 }
Vladimir Marko41559982017-01-06 14:04:23 +00007685 DCHECK(!cls->NeedsAccessCheck());
Pavle Batutae87a7182015-10-28 13:10:42 +01007686
Vladimir Marko41559982017-01-06 14:04:23 +00007687 LocationSummary* locations = cls->GetLocations();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007688 Location out_loc = locations->Out();
7689 Register out = out_loc.AsRegister<Register>();
7690 Register base_or_current_method_reg;
7691 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7692 switch (load_kind) {
7693 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007694 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007695 case HLoadClass::LoadKind::kBootImageAddress:
7696 case HLoadClass::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007697 base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
7698 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007699 case HLoadClass::LoadKind::kReferrersClass:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007700 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007701 base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
7702 break;
7703 default:
7704 base_or_current_method_reg = ZERO;
7705 break;
7706 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00007707
Alexey Frunze15958152017-02-09 19:08:30 -08007708 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
7709 ? kWithoutReadBarrier
7710 : kCompilerReadBarrierOption;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007711 bool generate_null_check = false;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007712 CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007713 switch (load_kind) {
7714 case HLoadClass::LoadKind::kReferrersClass: {
7715 DCHECK(!cls->CanCallRuntime());
7716 DCHECK(!cls->MustGenerateClinitCheck());
7717 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
7718 GenerateGcRootFieldLoad(cls,
7719 out_loc,
7720 base_or_current_method_reg,
Alexey Frunze15958152017-02-09 19:08:30 -08007721 ArtMethod::DeclaringClassOffset().Int32Value(),
7722 read_barrier_option);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007723 break;
7724 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007725 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007726 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze15958152017-02-09 19:08:30 -08007727 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007728 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Alexey Frunze06a46c42016-07-19 15:00:40 -07007729 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007730 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7731 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007732 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7733 out,
Alexey Frunzea663d9d2017-07-31 18:43:18 -07007734 base_or_current_method_reg);
7735 __ Addiu(out, out, /* placeholder */ 0x5678, &info_low->label);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007736 break;
7737 }
7738 case HLoadClass::LoadKind::kBootImageAddress: {
Alexey Frunze15958152017-02-09 19:08:30 -08007739 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007740 uint32_t address = dchecked_integral_cast<uint32_t>(
7741 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
7742 DCHECK_NE(address, 0u);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007743 __ LoadLiteral(out,
7744 base_or_current_method_reg,
7745 codegen_->DeduplicateBootImageAddressLiteral(address));
7746 break;
7747 }
Vladimir Marko94ec2db2017-09-06 17:21:03 +01007748 case HLoadClass::LoadKind::kBootImageClassTable: {
7749 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
7750 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
7751 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
7752 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7753 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
7754 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7755 out,
7756 base_or_current_method_reg);
7757 __ Lw(out, out, /* placeholder */ 0x5678, &info_low->label);
7758 // Extract the reference from the slot data, i.e. clear the hash bits.
7759 int32_t masked_hash = ClassTable::TableSlot::MaskHash(
7760 ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
7761 if (masked_hash != 0) {
7762 __ Addiu(out, out, -masked_hash);
7763 }
7764 break;
7765 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007766 case HLoadClass::LoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007767 bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
7768 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7769 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007770 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007771 Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007772 codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high,
7773 temp,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007774 base_or_current_method_reg);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007775 GenerateGcRootFieldLoad(cls,
7776 out_loc,
7777 temp,
7778 /* placeholder */ 0x5678,
7779 read_barrier_option,
7780 &info_low->label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007781 generate_null_check = true;
7782 break;
7783 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007784 case HLoadClass::LoadKind::kJitTableAddress: {
Alexey Frunze627c1a02017-01-30 19:28:14 -08007785 CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
7786 cls->GetTypeIndex(),
7787 cls->GetClass());
7788 bool reordering = __ SetReorder(false);
7789 __ Bind(&info->high_label);
7790 __ Lui(out, /* placeholder */ 0x1234);
Alexey Frunze627c1a02017-01-30 19:28:14 -08007791 __ SetReorder(reordering);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07007792 GenerateGcRootFieldLoad(cls,
7793 out_loc,
7794 out,
7795 /* placeholder */ 0x5678,
7796 read_barrier_option,
7797 &info->low_label);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007798 break;
7799 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007800 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007801 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00007802 LOG(FATAL) << "UNREACHABLE";
7803 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007804 }
7805
7806 if (generate_null_check || cls->MustGenerateClinitCheck()) {
7807 DCHECK(cls->CanCallRuntime());
7808 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007809 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007810 codegen_->AddSlowPath(slow_path);
7811 if (generate_null_check) {
7812 __ Beqz(out, slow_path->GetEntryLabel());
7813 }
7814 if (cls->MustGenerateClinitCheck()) {
7815 GenerateClassInitializationCheck(slow_path, out);
7816 } else {
7817 __ Bind(slow_path->GetExitLabel());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007818 }
7819 }
7820}
7821
7822static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07007823 return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007824}
7825
7826void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
7827 LocationSummary* locations =
7828 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
7829 locations->SetOut(Location::RequiresRegister());
7830}
7831
7832void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
7833 Register out = load->GetLocations()->Out().AsRegister<Register>();
7834 __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
7835}
7836
void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
  // HClearException has no inputs and no outputs; an empty no-call location
  // summary is all that is required.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
7840
void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Clear the pending exception by storing null (ZERO) into the thread-local
  // exception slot.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
7844
// Allocates the locations for HLoadString. On R2 the PC-relative kinds
// consume an extra register input for the PC-relative base; kRuntimeCall
// additionally fixes the output to the first runtime-call argument register.
void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageInternTable:
    case HLoadString::LoadKind::kBssEntry:
      if (isR6) {
        // R6 has PC-relative addressing; no base input needed.
        break;
      }
      FALLTHROUGH_INTENDED;
    // We need an extra register for PC-relative dex cache accesses.
    case HLoadString::LoadKind::kRuntimeCall:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    // The result arrives in the runtime's return/argument register.
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path.
        locations->AddTemp(Location::RequiresRegister());
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
7887
// Generates code materializing a java.lang.String reference for HLoadString,
// dispatching on the load kind chosen by the locations builder.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageInternTable:
    case HLoadString::LoadKind::kBssEntry:
      // On R6 PC-relative addressing needs no base; on R2 the base was set up in input 0.
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // Emit a linked high/low patch pair; 0x5678 below is a placeholder immediate
      // rewritten at link time via info_low->label.
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     out,
                                                     base_or_current_method_reg);
      __ Addiu(out, out, /* placeholder */ 0x5678, &info_low->label);
      return;
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      // The string is in the boot image; embed its address as a literal.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;
    }
    case HLoadString::LoadKind::kBootImageInternTable: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     out,
                                                     base_or_current_method_reg);
      // Load the reference from the intern table entry (LW instead of ADDIU).
      __ Lw(out, out, /* placeholder */ 0x5678, &info_low->label);
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      // With non-Baker read barriers there is no dedicated temp (temp-clobbering call,
      // see the locations builder), so reuse `out` to hold the BSS entry address.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     temp,
                                                     base_or_current_method_reg);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption,
                              &info_low->label);
      // A null BSS entry means the string is not resolved yet; call the slow path.
      SlowPathCodeMIPS* slow_path =
          new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load, info_high);
      codegen_->AddSlowPath(slow_path);
      __ Beqz(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      CodeGeneratorMIPS::JitPatchInfo* info =
          codegen_->NewJitRootStringPatch(load->GetDexFile(),
                                          load->GetStringIndex(),
                                          load->GetString());
      // Disable reordering so the LUI sits exactly at high_label for JIT patching.
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      __ SetReorder(reordering);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption,
                              &info->low_label);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Fallback: resolve the string through the runtime.
  DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
7997
// A long constant needs no code; its value is materialized at each use site.
void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
8006
// Monitor enter/exit is a runtime call; the object goes in the first
// runtime-calling-convention register.
void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
8013
8014void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
8015 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01008016 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008017 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
8018 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01008019 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008020 }
8021 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
8022}
8023
// Chooses register locations for HMul: core registers for int/long,
// FPU registers for float/double; output need not overlap an input.
void LocationsBuilderMIPS::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
8046
// Emits multiplication code; selects R2 vs R6 encodings and expands 64-bit
// multiplication from 32-bit partial products.
void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();

      if (isR6) {
        __ MulR6(dst, lhs, rhs);
      } else {
        __ MulR2(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();

      // Extra checks needed because of the existence of the A1_A2 register pair.
      // The algorithm is wrong if dst_high is either lhs_lo or rhs_lo:
      // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2).
      DCHECK_NE(dst_high, lhs_low);
      DCHECK_NE(dst_high, rhs_low);

      // A_B * C_D
      // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
      // dst_lo: [ low(B*D) ]
      // Note: R2 and R6 MUL produce the low 32 bit of the multiplication result.

      if (isR6) {
        __ MulR6(TMP, lhs_high, rhs_low);
        __ MulR6(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        __ MuhuR6(TMP, lhs_low, rhs_low);     // High half of the unsigned B*D product.
        __ Addu(dst_high, dst_high, TMP);
        __ MulR6(dst_low, lhs_low, rhs_low);
      } else {
        __ MulR2(TMP, lhs_high, rhs_low);
        __ MulR2(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        __ MultuR2(lhs_low, rhs_low);         // Unsigned B*D into HI/LO.
        __ Mfhi(TMP);
        __ Addu(dst_high, dst_high, TMP);
        __ Mflo(dst_low);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ MulS(dst, lhs, rhs);
      } else {
        __ MulD(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected mul type " << type;
  }
}
8118
// Chooses register locations for HNeg: core registers for int/long,
// FPU registers for float/double.
void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
8139
// Emits negation: 0 - src for integers (with borrow propagation for the
// 64-bit register pair), NEG.S/NEG.D for floating point.
void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Subu(dst, ZERO, src);
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Subu(dst_low, ZERO, src_low);
      // TMP = 1 if the low subtraction borrowed (0 - src_low != 0), else 0.
      __ Sltu(TMP, ZERO, dst_low);
      __ Subu(dst_high, ZERO, src_high);
      __ Subu(dst_high, dst_high, TMP);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ NegS(dst, src);
      } else {
        __ NegD(dst, src);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected neg type " << type;
  }
}
8177
// Array allocation always calls into the runtime; inputs and output follow
// the runtime calling convention.
void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
8196
// Object allocation: regular objects call the allocation entrypoint; strings
// go through StringFactory (NewEmptyString) using a temp for the method pointer.
void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
    // Load the StringFactory method from the thread, then its entry point, and call it.
    __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ NopIfNoReordering();
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
8226
// Bitwise NOT: implemented as NOR with zero (dst = ~(src | 0)), applied to
// both halves for a long register pair.
void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Nor(dst, src, ZERO);
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Nor(dst_high, src_high, ZERO);
      __ Nor(dst_low, src_low, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}
8259
8260void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
8261 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8262 locations->SetInAt(0, Location::RequiresRegister());
8263 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8264}
8265
8266void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
8267 LocationSummary* locations = instruction->GetLocations();
8268 __ Xori(locations->Out().AsRegister<Register>(),
8269 locations->InAt(0).AsRegister<Register>(),
8270 1);
8271}
8272
// Null check locations are created by the shared throwing-slow-path helper;
// the checked object must be in a register.
void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
8277
// Implicit null check: a dummy load from the object (into ZERO) so that a null
// reference faults; the recorded PC maps the fault back to this instruction.
void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // Skip if a subsequent user instruction will perform the faulting access itself.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ Lw(ZERO, obj.AsRegister<Register>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}
8287
Calin Juravle2ae48182016-03-16 14:05:09 +00008288void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008289 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00008290 AddSlowPath(slow_path);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008291
8292 Location obj = instruction->GetLocations()->InAt(0);
8293
8294 __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
8295}
8296
// Null check code generation delegates to the codegen, which picks implicit
// (faulting load) or explicit (compare-and-branch) form.
void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

// Bitwise OR shares the generic binary-op handling.
void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

// Parallel moves are inserted by the register allocator and never reach the
// locations builder; code emission goes through the move resolver.
void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
8316
// Parameters live where the calling convention placed them; stack-passed
// parameters are in the caller's frame, so their slot index is rebased by the
// current frame size.
void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
                                                       ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
8332
// The current ArtMethod* is always available in the dedicated method register.
void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
                                                      ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
8343
// Phis accept any location; they are resolved by the register allocator and
// must never survive to code generation.
void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
8355
// Remainder: int is computed inline; long/float/double call into the runtime,
// so their inputs/outputs follow the runtime calling convention.
void LocationsBuilderMIPS::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong: {
      // 64-bit operands occupy two consecutive argument registers each.
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8392
// Emits remainder code: inline div/rem for int, runtime calls
// (lmod / fmodf / fmod) for the other types.
void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
      break;
    }
    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      break;
    }
    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmod, double, double, double>();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8419
// A constructor fence is emitted as a store-store barrier.
void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

// Explicit memory barriers use the barrier kind carried by the instruction.
void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
8436
// Return: the value must be in the ABI return location; both return forms
// emit the common frame exit sequence.
void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, MipsReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
8454
// Rotate and shift operations all share the generic shift handling.
void LocationsBuilderMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8478
// Subtraction shares the generic binary-op handling; static field accesses
// share the generic field get/set handling.
void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}

void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
8505
// Unresolved instance field accesses cannot be compiled to direct loads/stores;
// they are routed through generic runtime field-access helpers using the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8541
// Unresolved static field accesses mirror the unresolved instance field path:
// locations and code both come from the generic runtime field-access helpers.
void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}

void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8577
8578void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01008579 LocationSummary* locations =
8580 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Lena Djokicca8c2952017-05-29 11:31:46 +02008581 // In suspend check slow path, usually there are no caller-save registers at all.
8582 // If SIMD instructions are present, however, we force spilling all live SIMD
8583 // registers in full width (since the runtime only saves/restores lower part).
8584 locations->SetCustomSlowPathCallerSaves(
8585 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008586}
8587
8588void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
8589 HBasicBlock* block = instruction->GetBlock();
8590 if (block->GetLoopInformation() != nullptr) {
8591 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
8592 // The back edge will generate the suspend check.
8593 return;
8594 }
8595 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
8596 // The goto will generate the suspend check.
8597 return;
8598 }
8599 GenerateSuspendCheck(instruction, nullptr);
8600}
8601
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008602void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
8603 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008604 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008605 InvokeRuntimeCallingConvention calling_convention;
8606 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
8607}
8608
void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
  // Delegate to the kQuickDeliverException runtime entrypoint; it takes the
  // exception object (mirror::Object*) and does not return normally.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
8613
8614void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
8615 Primitive::Type input_type = conversion->GetInputType();
8616 Primitive::Type result_type = conversion->GetResultType();
8617 DCHECK_NE(input_type, result_type);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008618 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008619
8620 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
8621 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
8622 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
8623 }
8624
8625 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008626 if (!isR6 &&
8627 ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
8628 (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008629 call_kind = LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008630 }
8631
8632 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
8633
8634 if (call_kind == LocationSummary::kNoCall) {
8635 if (Primitive::IsFloatingPointType(input_type)) {
8636 locations->SetInAt(0, Location::RequiresFpuRegister());
8637 } else {
8638 locations->SetInAt(0, Location::RequiresRegister());
8639 }
8640
8641 if (Primitive::IsFloatingPointType(result_type)) {
8642 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8643 } else {
8644 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8645 }
8646 } else {
8647 InvokeRuntimeCallingConvention calling_convention;
8648
8649 if (Primitive::IsFloatingPointType(input_type)) {
8650 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8651 } else {
8652 DCHECK_EQ(input_type, Primitive::kPrimLong);
8653 locations->SetInAt(0, Location::RegisterPairLocation(
8654 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8655 }
8656
8657 locations->SetOut(calling_convention.GetReturnLocation(result_type));
8658 }
8659}
8660
// Emits code for all primitive type conversions. Depending on the type pair
// and the ISA revision this is either an in-line instruction sequence or a
// runtime call (long<->float/double on pre-R6).
void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // seb/seh exist only from MIPS32R2 onwards; earlier revisions emulate them.
  bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  DCHECK_NE(input_type, result_type);

  if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
    // int -> long: sign-extend the 32-bit value into a register pair.
    Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
    Register dst_low = locations->Out().AsRegisterPairLow<Register>();
    Register src = locations->InAt(0).AsRegister<Register>();

    if (dst_low != src) {
      __ Move(dst_low, src);
    }
    // High word is the sign of the source.
    __ Sra(dst_high, src, 31);
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    // Integral -> integral narrowing/extension. For a long source only the
    // low word matters.
    Register dst = locations->Out().AsRegister<Register>();
    Register src = (input_type == Primitive::kPrimLong)
        ? locations->InAt(0).AsRegisterPairLow<Register>()
        : locations->InAt(0).AsRegister<Register>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (has_sign_extension) {
          __ Seb(dst, src);
        } else {
          // No seb on R1: shift left then arithmetic shift right.
          __ Sll(dst, src, 24);
          __ Sra(dst, dst, 24);
        }
        break;
      case Primitive::kPrimShort:
        if (has_sign_extension) {
          __ Seh(dst, src);
        } else {
          // No seh on R1: shift left then arithmetic shift right.
          __ Sll(dst, src, 16);
          __ Sra(dst, dst, 16);
        }
        break;
      case Primitive::kPrimInt:
        // long -> int: just take the low word.
        if (dst != src) {
          __ Move(dst, src);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    // Integral -> float/double.
    if (input_type == Primitive::kPrimLong) {
      if (isR6) {
        // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
        Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
        FRegister dst = locations->Out().AsFpuRegister<FRegister>();
        // Assemble the 64-bit integer in FTMP, then convert.
        __ Mtc1(src_low, FTMP);
        __ Mthc1(src_high, FTMP);
        if (result_type == Primitive::kPrimFloat) {
          __ Cvtsl(dst, FTMP);
        } else {
          __ Cvtdl(dst, FTMP);
        }
      } else {
        // Pre-R6: go through the runtime.
        QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
                                                                                : kQuickL2d;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (result_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
        } else {
          CheckEntrypointTypes<kQuickL2d, double, int64_t>();
        }
      }
    } else {
      // 32-bit integral source: move to an FPU register and convert in place.
      Register src = locations->InAt(0).AsRegister<Register>();
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);

    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**-63 for truncating to long, -2**-31 for truncating to int).
    // If the input is greater than or equal to the minimum, it procedes to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    if (result_type == Primitive::kPrimLong) {
      if (isR6) {
        // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
        Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
        Register dst_low = locations->Out().AsRegisterPairLow<Register>();

        if (input_type == Primitive::kPrimFloat) {
          __ TruncLS(FTMP, src);
        } else {
          __ TruncLD(FTMP, src);
        }
        // Move the 64-bit result out of FTMP into the register pair.
        __ Mfc1(dst_low, FTMP);
        __ Mfhc1(dst_high, FTMP);
      } else {
        // Pre-R6: go through the runtime.
        QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
                                                                               : kQuickD2l;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (input_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
        } else {
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
        }
      }
    } else {
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      Register dst = locations->Out().AsRegister<Register>();
      MipsLabel truncate;
      MipsLabel done;

      if (!isR6) {
        // Pre-NAN2008 path: handle NaN and too-small inputs manually (see the
        // comment block above).
        if (input_type == Primitive::kPrimFloat) {
          // Load int32_t min as a float into FTMP.
          uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, min_val);
          __ Mtc1(TMP, FTMP);
        } else {
          // Load int32_t min as a double into FTMP (low word is zero).
          uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, High32Bits(min_val));
          __ Mtc1(ZERO, FTMP);
          __ MoveToFpuHigh(TMP, FTMP);
        }

        // If min <= src, the truncate instruction is safe to use.
        if (input_type == Primitive::kPrimFloat) {
          __ ColeS(0, FTMP, src);
        } else {
          __ ColeD(0, FTMP, src);
        }
        __ Bc1t(0, &truncate);

        // Otherwise: NaN -> 0, too small -> int32_t min.
        if (input_type == Primitive::kPrimFloat) {
          __ CeqS(0, src, src);
        } else {
          __ CeqD(0, src, src);
        }
        __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
        // Clear dst if the self-comparison was false (i.e. src is NaN).
        __ Movf(dst, ZERO, 0);

        __ B(&done);

        __ Bind(&truncate);
      }

      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);

      if (!isR6) {
        __ Bind(&done);
      }
    }
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float <-> double.
    FRegister dst = locations->Out().AsFpuRegister<FRegister>();
    FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
8857
// Unsigned shift right and xor share the generic shift/binary-op handlers.

void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
8883
// All comparison conditions (signed, unsigned, and equality) share a single
// implementation in HandleCondition(); the visitors below only dispatch.

void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
8963
void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  // Only the switch value is needed; code generation uses the fixed scratch
  // registers (TMP/AT) for intermediate values.
  locations->SetInAt(0, Location::RequiresRegister());
}
8969
// Emits a packed switch as a chain of compare-and-branch instructions.
// The running value in TMP is repeatedly decremented so each case can be
// tested with bltz/beqz against zero instead of materializing constants.
void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  Register temp_reg = TMP;
  // Bias the value so case 0 maps to zero.
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle two cases per iteration: one bltz and one beqz after subtracting 2.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ B(codegen_->GetLabelOf(default_block));
  }
}
9005
// Emits a packed switch as an indirect jump through a jump table: bounds-check
// the biased value, load a table-relative offset, and jump to the target.
// `constant_area` is the base register for addressing the table (ZERO on R6,
// where PC-relative addressing is available).
void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
                                                             Register constant_area,
                                                             int32_t lower_bound,
                                                             uint32_t num_entries,
                                                             HBasicBlock* switch_block,
                                                             HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<MipsLabel*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  if (IsInt<16>(static_cast<int32_t>(num_entries))) {
    // num_entries fits the sltiu immediate.
    __ Sltiu(AT, TMP, num_entries);
    __ Beqz(AT, codegen_->GetLabelOf(default_block));
  } else {
    __ LoadConst32(AT, num_entries);
    __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
  }

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, constant_area, table->GetLabel());
  // Scale the index by 4 (shift of 2) — each table entry is one word.
  __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Addu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ NopIfNoReordering();
}
9042
9043void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9044 int32_t lower_bound = switch_instr->GetStartValue();
9045 uint32_t num_entries = switch_instr->GetNumEntries();
9046 LocationSummary* locations = switch_instr->GetLocations();
9047 Register value_reg = locations->InAt(0).AsRegister<Register>();
9048 HBasicBlock* switch_block = switch_instr->GetBlock();
9049 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9050
9051 if (codegen_->GetInstructionSetFeatures().IsR6() &&
9052 num_entries > kPackedSwitchJumpTableThreshold) {
9053 // R6 uses PC-relative addressing to access the jump table.
9054 // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
9055 // the jump table and it is implemented by changing HPackedSwitch to
9056 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
9057 // See VisitMipsPackedSwitch() for the table-based implementation on R2.
9058 GenTableBasedPackedSwitch(value_reg,
9059 ZERO,
9060 lower_bound,
9061 num_entries,
9062 switch_block,
9063 default_block);
9064 } else {
9065 GenPackedSwitchWithCompares(value_reg,
9066 lower_bound,
9067 num_entries,
9068 switch_block,
9069 default_block);
9070 }
9071}
9072
void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  // The switch value.
  locations->SetInAt(0, Location::RequiresRegister());
  // Constant area pointer (HMipsComputeBaseMethodAddress).
  locations->SetInAt(1, Location::RequiresRegister());
}
9080
9081void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
9082 int32_t lower_bound = switch_instr->GetStartValue();
9083 uint32_t num_entries = switch_instr->GetNumEntries();
9084 LocationSummary* locations = switch_instr->GetLocations();
9085 Register value_reg = locations->InAt(0).AsRegister<Register>();
9086 Register constant_area = locations->InAt(1).AsRegister<Register>();
9087 HBasicBlock* switch_block = switch_instr->GetBlock();
9088 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9089
9090 // This is an R2-only path. HPackedSwitch has been changed to
9091 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
9092 // required to address the jump table relative to PC.
9093 GenTableBasedPackedSwitch(value_reg,
9094 constant_area,
9095 lower_bound,
9096 num_entries,
9097 switch_block,
9098 default_block);
9099}
9100
void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
  // Produces the base address used for constant-area/PC-relative addressing.
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations = insn->GetLocations();
  Register reg = locations->Out().AsRegister<Register>();

  // R6 addresses the constant area PC-relatively and never needs this node.
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());

  // Generate a dummy PC-relative call to obtain PC.
  __ Nal();
  // Grab the return address off RA.
  __ Move(reg, RA);

  // Remember this offset (the obtained PC value) for later use with constant area.
  __ BindPcRelBaseLabel();
}
9123
void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Delegates to the shared runtime-call helper for unresolved invokes.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
9134
void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  // In: the Class object; out: the ArtMethod* loaded from its vtable or IMT.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded in the Class object: a single load suffices.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMipsPointerSize).SizeValue();
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      method_offset);
  } else {
    // IMT access is indirect: load the IMT pointer from the Class first,
    // then load the entry at the element offset.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMipsPointerSize));
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->Out().AsRegister<Register>(),
                      method_offset);
  }
}
9164
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02009165#undef __
9166#undef QUICK_ENTRY_POINT
9167
9168} // namespace mips
9169} // namespace art