blob: a27cbce3dbf455b33749336dcab568da4ad52b9c [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
27#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070028#include "intrinsics_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070029#include "mirror/array-inl.h"
30#include "mirror/class-inl.h"
31#include "offsets.h"
32#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070033#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070034#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070035#include "utils/stack_checks.h"
36
37namespace art {
38namespace mips64 {
39
40static constexpr int kCurrentMethodStackOffset = 0;
41static constexpr GpuRegister kMethodRegisterArgument = A0;
42
Alexey Frunze4147fcc2017-06-17 19:57:27 -070043// Flags controlling the use of thunks for Baker read barriers.
44constexpr bool kBakerReadBarrierThunksEnableForFields = true;
45constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
46constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
47
Alexey Frunze4dda3372015-06-01 18:31:49 -070048Location Mips64ReturnLocation(Primitive::Type return_type) {
49 switch (return_type) {
50 case Primitive::kPrimBoolean:
51 case Primitive::kPrimByte:
52 case Primitive::kPrimChar:
53 case Primitive::kPrimShort:
54 case Primitive::kPrimInt:
55 case Primitive::kPrimNot:
56 case Primitive::kPrimLong:
57 return Location::RegisterLocation(V0);
58
59 case Primitive::kPrimFloat:
60 case Primitive::kPrimDouble:
61 return Location::FpuRegisterLocation(F0);
62
63 case Primitive::kPrimVoid:
64 return Location();
65 }
66 UNREACHABLE();
67}
68
69Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
70 return Mips64ReturnLocation(type);
71}
72
73Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
74 return Location::RegisterLocation(kMethodRegisterArgument);
75}
76
77Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
78 Location next_location;
79 if (type == Primitive::kPrimVoid) {
80 LOG(FATAL) << "Unexpected parameter type " << type;
81 }
82
83 if (Primitive::IsFloatingPointType(type) &&
84 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
85 next_location = Location::FpuRegisterLocation(
86 calling_convention.GetFpuRegisterAt(float_index_++));
87 gp_index_++;
88 } else if (!Primitive::IsFloatingPointType(type) &&
89 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
90 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
91 float_index_++;
92 } else {
93 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
94 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
95 : Location::StackSlot(stack_offset);
96 }
97
98 // Space on the stack is reserved for all arguments.
99 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
100
Alexey Frunze4dda3372015-06-01 18:31:49 -0700101 return next_location;
102}
103
104Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
105 return Mips64ReturnLocation(type);
106}
107
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100108// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
109#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700110#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700111
112class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
113 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000114 explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700115
116 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100117 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700118 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
119 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000120 if (instruction_->CanThrowIntoCatchBlock()) {
121 // Live registers will be restored in the catch block if caught.
122 SaveLiveRegisters(codegen, instruction_->GetLocations());
123 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700124 // We're moving two locations to locations that could overlap, so we need a parallel
125 // move resolver.
126 InvokeRuntimeCallingConvention calling_convention;
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100127 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700128 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
129 Primitive::kPrimInt,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100130 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700131 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
132 Primitive::kPrimInt);
Serban Constantinescufc734082016-07-19 17:18:07 +0100133 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
134 ? kQuickThrowStringBounds
135 : kQuickThrowArrayBounds;
136 mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100137 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700138 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
139 }
140
Alexandre Rames8158f282015-08-07 10:26:17 +0100141 bool IsFatal() const OVERRIDE { return true; }
142
Roland Levillain46648892015-06-19 16:07:18 +0100143 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
144
Alexey Frunze4dda3372015-06-01 18:31:49 -0700145 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700146 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
147};
148
149class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
150 public:
Alexey Frunzec61c0762017-04-10 13:54:23 -0700151 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
152 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700153
154 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
155 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
156 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100157 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700158 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
159 }
160
Alexandre Rames8158f282015-08-07 10:26:17 +0100161 bool IsFatal() const OVERRIDE { return true; }
162
Roland Levillain46648892015-06-19 16:07:18 +0100163 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
164
Alexey Frunze4dda3372015-06-01 18:31:49 -0700165 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700166 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
167};
168
169class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
170 public:
171 LoadClassSlowPathMIPS64(HLoadClass* cls,
172 HInstruction* at,
173 uint32_t dex_pc,
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700174 bool do_clinit,
175 const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr)
176 : SlowPathCodeMIPS64(at),
177 cls_(cls),
178 dex_pc_(dex_pc),
179 do_clinit_(do_clinit),
180 bss_info_high_(bss_info_high) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700181 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
182 }
183
184 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000185 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700186 Location out = locations->Out();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700187 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700188 const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
189 InvokeRuntimeCallingConvention calling_convention;
190 DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
191 const bool is_load_class_bss_entry =
192 (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700193 __ Bind(GetEntryLabel());
194 SaveLiveRegisters(codegen, locations);
195
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700196 // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
197 GpuRegister entry_address = kNoGpuRegister;
198 if (is_load_class_bss_entry && baker_or_no_read_barriers) {
199 GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
200 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
201 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
202 // kSaveEverything call.
203 entry_address = temp_is_a0 ? out.AsRegister<GpuRegister>() : temp;
204 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
205 if (temp_is_a0) {
206 __ Move(entry_address, temp);
207 }
208 }
209
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000210 dex::TypeIndex type_index = cls_->GetTypeIndex();
211 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
Serban Constantinescufc734082016-07-19 17:18:07 +0100212 QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
213 : kQuickInitializeType;
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000214 mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700215 if (do_clinit_) {
216 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
217 } else {
218 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
219 }
220
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700221 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
222 if (is_load_class_bss_entry && baker_or_no_read_barriers) {
223 // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
224 DCHECK(bss_info_high_);
225 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
226 mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
227 __ Bind(&info_low->label);
228 __ StoreToOffset(kStoreWord,
229 calling_convention.GetRegisterAt(0),
230 entry_address,
231 /* placeholder */ 0x5678);
232 }
233
Alexey Frunze4dda3372015-06-01 18:31:49 -0700234 // Move the class to the desired location.
Alexey Frunze4dda3372015-06-01 18:31:49 -0700235 if (out.IsValid()) {
236 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000237 Primitive::Type type = instruction_->GetType();
Alexey Frunzec61c0762017-04-10 13:54:23 -0700238 mips64_codegen->MoveLocation(out,
239 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
240 type);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700241 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700242 RestoreLiveRegisters(codegen, locations);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700243
244 // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
245 if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
246 // For non-Baker read barriers we need to re-calculate the address of
247 // the class entry.
248 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko1998cd02017-01-13 13:02:58 +0000249 mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700250 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
251 mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
252 mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
253 __ StoreToOffset(kStoreWord, out.AsRegister<GpuRegister>(), TMP, /* placeholder */ 0x5678);
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000254 }
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700255 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700256 }
257
Roland Levillain46648892015-06-19 16:07:18 +0100258 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }
259
Alexey Frunze4dda3372015-06-01 18:31:49 -0700260 private:
261 // The class this slow path will load.
262 HLoadClass* const cls_;
263
Alexey Frunze4dda3372015-06-01 18:31:49 -0700264 // The dex PC of `at_`.
265 const uint32_t dex_pc_;
266
267 // Whether to initialize the class.
268 const bool do_clinit_;
269
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700270 // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
271 const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;
272
Alexey Frunze4dda3372015-06-01 18:31:49 -0700273 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
274};
275
276class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
277 public:
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700278 explicit LoadStringSlowPathMIPS64(HLoadString* instruction,
279 const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high)
280 : SlowPathCodeMIPS64(instruction), bss_info_high_(bss_info_high) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700281
282 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700283 DCHECK(instruction_->IsLoadString());
284 DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700285 LocationSummary* locations = instruction_->GetLocations();
286 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700287 HLoadString* load = instruction_->AsLoadString();
288 const dex::StringIndex string_index = load->GetStringIndex();
289 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700290 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700291 const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
292 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700293 __ Bind(GetEntryLabel());
294 SaveLiveRegisters(codegen, locations);
295
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700296 // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
297 GpuRegister entry_address = kNoGpuRegister;
298 if (baker_or_no_read_barriers) {
299 GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
300 bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
301 // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
302 // kSaveEverything call.
303 entry_address = temp_is_a0 ? out : temp;
304 DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
305 if (temp_is_a0) {
306 __ Move(entry_address, temp);
307 }
308 }
309
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000310 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
Serban Constantinescufc734082016-07-19 17:18:07 +0100311 mips64_codegen->InvokeRuntime(kQuickResolveString,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700312 instruction_,
313 instruction_->GetDexPc(),
314 this);
315 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700316
317 // Store the resolved string to the BSS entry.
318 if (baker_or_no_read_barriers) {
319 // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
320 DCHECK(bss_info_high_);
321 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +0100322 mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(),
323 string_index,
324 bss_info_high_);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700325 __ Bind(&info_low->label);
326 __ StoreToOffset(kStoreWord,
327 calling_convention.GetRegisterAt(0),
328 entry_address,
329 /* placeholder */ 0x5678);
330 }
331
Alexey Frunze4dda3372015-06-01 18:31:49 -0700332 Primitive::Type type = instruction_->GetType();
333 mips64_codegen->MoveLocation(locations->Out(),
Alexey Frunzec61c0762017-04-10 13:54:23 -0700334 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700335 type);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700336 RestoreLiveRegisters(codegen, locations);
Alexey Frunzef63f5692016-12-13 17:43:11 -0800337
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700338 // Store the resolved string to the BSS entry.
339 if (!baker_or_no_read_barriers) {
340 // For non-Baker read barriers we need to re-calculate the address of
341 // the string entry.
342 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +0100343 mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700344 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +0100345 mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700346 mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
347 __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
348 }
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700349 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700350 }
351
Roland Levillain46648892015-06-19 16:07:18 +0100352 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }
353
Alexey Frunze4dda3372015-06-01 18:31:49 -0700354 private:
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700355 // Pointer to the high half PC-relative patch info.
356 const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;
357
Alexey Frunze4dda3372015-06-01 18:31:49 -0700358 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
359};
360
361class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
362 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000363 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700364
365 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
366 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
367 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000368 if (instruction_->CanThrowIntoCatchBlock()) {
369 // Live registers will be restored in the catch block if caught.
370 SaveLiveRegisters(codegen, instruction_->GetLocations());
371 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100372 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700373 instruction_,
374 instruction_->GetDexPc(),
375 this);
376 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
377 }
378
Alexandre Rames8158f282015-08-07 10:26:17 +0100379 bool IsFatal() const OVERRIDE { return true; }
380
Roland Levillain46648892015-06-19 16:07:18 +0100381 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
382
Alexey Frunze4dda3372015-06-01 18:31:49 -0700383 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700384 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
385};
386
387class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
388 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100389 SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000390 : SlowPathCodeMIPS64(instruction), successor_(successor) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700391
392 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200393 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700394 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
395 __ Bind(GetEntryLabel());
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200396 SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
Serban Constantinescufc734082016-07-19 17:18:07 +0100397 mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700398 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200399 RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
Alexey Frunze4dda3372015-06-01 18:31:49 -0700400 if (successor_ == nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700401 __ Bc(GetReturnLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700402 } else {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700403 __ Bc(mips64_codegen->GetLabelOf(successor_));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700404 }
405 }
406
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700407 Mips64Label* GetReturnLabel() {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700408 DCHECK(successor_ == nullptr);
409 return &return_label_;
410 }
411
Roland Levillain46648892015-06-19 16:07:18 +0100412 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }
413
Alexey Frunze4dda3372015-06-01 18:31:49 -0700414 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700415 // If not null, the block to branch to after the suspend check.
416 HBasicBlock* const successor_;
417
418 // If `successor_` is null, the label to branch to after the suspend check.
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700419 Mips64Label return_label_;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700420
421 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
422};
423
424class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
425 public:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800426 explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
427 : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700428
429 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
430 LocationSummary* locations = instruction_->GetLocations();
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800431
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100432 uint32_t dex_pc = instruction_->GetDexPc();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700433 DCHECK(instruction_->IsCheckCast()
434 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
435 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
436
437 __ Bind(GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800438 if (!is_fatal_) {
439 SaveLiveRegisters(codegen, locations);
440 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700441
442 // We're moving two locations to locations that could overlap, so we need a parallel
443 // move resolver.
444 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800445 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700446 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
447 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800448 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700449 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
450 Primitive::kPrimNot);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700451 if (instruction_->IsInstanceOf()) {
Serban Constantinescufc734082016-07-19 17:18:07 +0100452 mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800453 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700454 Primitive::Type ret_type = instruction_->GetType();
455 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
456 mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700457 } else {
458 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800459 mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
460 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700461 }
462
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800463 if (!is_fatal_) {
464 RestoreLiveRegisters(codegen, locations);
465 __ Bc(GetExitLabel());
466 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700467 }
468
Roland Levillain46648892015-06-19 16:07:18 +0100469 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }
470
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800471 bool IsFatal() const OVERRIDE { return is_fatal_; }
472
Alexey Frunze4dda3372015-06-01 18:31:49 -0700473 private:
Alexey Frunze66b69ad2017-02-24 00:51:44 -0800474 const bool is_fatal_;
475
Alexey Frunze4dda3372015-06-01 18:31:49 -0700476 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
477};
478
479class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
480 public:
Aart Bik42249c32016-01-07 15:33:50 -0800481 explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000482 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700483
484 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800485 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700486 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100487 LocationSummary* locations = instruction_->GetLocations();
488 SaveLiveRegisters(codegen, locations);
489 InvokeRuntimeCallingConvention calling_convention;
490 __ LoadConst32(calling_convention.GetRegisterAt(0),
491 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufc734082016-07-19 17:18:07 +0100492 mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100493 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700494 }
495
Roland Levillain46648892015-06-19 16:07:18 +0100496 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }
497
Alexey Frunze4dda3372015-06-01 18:31:49 -0700498 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700499 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
500};
501
Alexey Frunze15958152017-02-09 19:08:30 -0800502class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
503 public:
504 explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}
505
506 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
507 LocationSummary* locations = instruction_->GetLocations();
508 __ Bind(GetEntryLabel());
509 SaveLiveRegisters(codegen, locations);
510
511 InvokeRuntimeCallingConvention calling_convention;
512 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
513 parallel_move.AddMove(
514 locations->InAt(0),
515 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
516 Primitive::kPrimNot,
517 nullptr);
518 parallel_move.AddMove(
519 locations->InAt(1),
520 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
521 Primitive::kPrimInt,
522 nullptr);
523 parallel_move.AddMove(
524 locations->InAt(2),
525 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
526 Primitive::kPrimNot,
527 nullptr);
528 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
529
530 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
531 mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
532 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
533 RestoreLiveRegisters(codegen, locations);
534 __ Bc(GetExitLabel());
535 }
536
537 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }
538
539 private:
540 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
541};
542
543// Slow path marking an object reference `ref` during a read
544// barrier. The field `obj.field` in the object `obj` holding this
545// reference does not get updated by this slow path after marking (see
546// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
547//
548// This means that after the execution of this slow path, `ref` will
549// always be up-to-date, but `obj.field` may not; i.e., after the
550// flip, `ref` will be a to-space reference, but `obj.field` will
551// probably still be a from-space reference (unless it gets updated by
552// another thread, or if another thread installed another object
553// reference (different from `ref`) in `obj.field`).
554//
555// If `entrypoint` is a valid location it is assumed to already be
556// holding the entrypoint. The case where the entrypoint is passed in
557// is for the GcRoot read barrier.
558class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
559 public:
560 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
561 Location ref,
562 Location entrypoint = Location::NoLocation())
563 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
564 DCHECK(kEmitCompilerReadBarrier);
565 }
566
567 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
568
569 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
570 LocationSummary* locations = instruction_->GetLocations();
571 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
572 DCHECK(locations->CanCall());
573 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
574 DCHECK(instruction_->IsInstanceFieldGet() ||
575 instruction_->IsStaticFieldGet() ||
576 instruction_->IsArrayGet() ||
577 instruction_->IsArraySet() ||
578 instruction_->IsLoadClass() ||
579 instruction_->IsLoadString() ||
580 instruction_->IsInstanceOf() ||
581 instruction_->IsCheckCast() ||
582 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
583 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
584 << "Unexpected instruction in read barrier marking slow path: "
585 << instruction_->DebugName();
586
587 __ Bind(GetEntryLabel());
588 // No need to save live registers; it's taken care of by the
589 // entrypoint. Also, there is no need to update the stack mask,
590 // as this runtime call will not trigger a garbage collection.
591 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
592 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
593 (S2 <= ref_reg && ref_reg <= S7) ||
594 (ref_reg == S8)) << ref_reg;
595 // "Compact" slow path, saving two moves.
596 //
597 // Instead of using the standard runtime calling convention (input
598 // and output in A0 and V0 respectively):
599 //
600 // A0 <- ref
601 // V0 <- ReadBarrierMark(A0)
602 // ref <- V0
603 //
604 // we just use rX (the register containing `ref`) as input and output
605 // of a dedicated entrypoint:
606 //
607 // rX <- ReadBarrierMarkRegX(rX)
608 //
609 if (entrypoint_.IsValid()) {
610 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
611 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
612 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
613 __ Nop();
614 } else {
615 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100616 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800617 // This runtime call does not require a stack map.
618 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
619 instruction_,
620 this);
621 }
622 __ Bc(GetExitLabel());
623 }
624
625 private:
626 // The location (register) of the marked object reference.
627 const Location ref_;
628
629 // The location of the entrypoint if already loaded.
630 const Location entrypoint_;
631
632 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
633};
634
635// Slow path marking an object reference `ref` during a read barrier,
636// and if needed, atomically updating the field `obj.field` in the
637// object `obj` holding this reference after marking (contrary to
638// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
639// `obj.field`).
640//
641// This means that after the execution of this slow path, both `ref`
642// and `obj.field` will be up-to-date; i.e., after the flip, both will
643// hold the same to-space reference (unless another thread installed
644// another object reference (different from `ref`) in `obj.field`).
645class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
646 public:
647 ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
648 Location ref,
649 GpuRegister obj,
650 Location field_offset,
651 GpuRegister temp1)
652 : SlowPathCodeMIPS64(instruction),
653 ref_(ref),
654 obj_(obj),
655 field_offset_(field_offset),
656 temp1_(temp1) {
657 DCHECK(kEmitCompilerReadBarrier);
658 }
659
660 const char* GetDescription() const OVERRIDE {
661 return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
662 }
663
664 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
665 LocationSummary* locations = instruction_->GetLocations();
666 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
667 DCHECK(locations->CanCall());
668 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
669 // This slow path is only used by the UnsafeCASObject intrinsic.
670 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
671 << "Unexpected instruction in read barrier marking and field updating slow path: "
672 << instruction_->DebugName();
673 DCHECK(instruction_->GetLocations()->Intrinsified());
674 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
675 DCHECK(field_offset_.IsRegister()) << field_offset_;
676
677 __ Bind(GetEntryLabel());
678
679 // Save the old reference.
680 // Note that we cannot use AT or TMP to save the old reference, as those
681 // are used by the code that follows, but we need the old reference after
682 // the call to the ReadBarrierMarkRegX entry point.
683 DCHECK_NE(temp1_, AT);
684 DCHECK_NE(temp1_, TMP);
685 __ Move(temp1_, ref_reg);
686
687 // No need to save live registers; it's taken care of by the
688 // entrypoint. Also, there is no need to update the stack mask,
689 // as this runtime call will not trigger a garbage collection.
690 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
691 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
692 (S2 <= ref_reg && ref_reg <= S7) ||
693 (ref_reg == S8)) << ref_reg;
694 // "Compact" slow path, saving two moves.
695 //
696 // Instead of using the standard runtime calling convention (input
697 // and output in A0 and V0 respectively):
698 //
699 // A0 <- ref
700 // V0 <- ReadBarrierMark(A0)
701 // ref <- V0
702 //
703 // we just use rX (the register containing `ref`) as input and output
704 // of a dedicated entrypoint:
705 //
706 // rX <- ReadBarrierMarkRegX(rX)
707 //
708 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100709 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800710 // This runtime call does not require a stack map.
711 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
712 instruction_,
713 this);
714
715 // If the new reference is different from the old reference,
716 // update the field in the holder (`*(obj_ + field_offset_)`).
717 //
718 // Note that this field could also hold a different object, if
719 // another thread had concurrently changed it. In that case, the
720 // the compare-and-set (CAS) loop below would abort, leaving the
721 // field as-is.
722 Mips64Label done;
723 __ Beqc(temp1_, ref_reg, &done);
724
725 // Update the the holder's field atomically. This may fail if
726 // mutator updates before us, but it's OK. This is achieved
727 // using a strong compare-and-set (CAS) operation with relaxed
728 // memory synchronization ordering, where the expected value is
729 // the old reference and the desired value is the new reference.
730
731 // Convenience aliases.
732 GpuRegister base = obj_;
733 GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
734 GpuRegister expected = temp1_;
735 GpuRegister value = ref_reg;
736 GpuRegister tmp_ptr = TMP; // Pointer to actual memory.
737 GpuRegister tmp = AT; // Value in memory.
738
739 __ Daddu(tmp_ptr, base, offset);
740
741 if (kPoisonHeapReferences) {
742 __ PoisonHeapReference(expected);
743 // Do not poison `value` if it is the same register as
744 // `expected`, which has just been poisoned.
745 if (value != expected) {
746 __ PoisonHeapReference(value);
747 }
748 }
749
750 // do {
751 // tmp = [r_ptr] - expected;
752 // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
753
754 Mips64Label loop_head, exit_loop;
755 __ Bind(&loop_head);
756 __ Ll(tmp, tmp_ptr);
757 // The LL instruction sign-extends the 32-bit value, but
758 // 32-bit references must be zero-extended. Zero-extend `tmp`.
759 __ Dext(tmp, tmp, 0, 32);
760 __ Bnec(tmp, expected, &exit_loop);
761 __ Move(tmp, value);
762 __ Sc(tmp, tmp_ptr);
763 __ Beqzc(tmp, &loop_head);
764 __ Bind(&exit_loop);
765
766 if (kPoisonHeapReferences) {
767 __ UnpoisonHeapReference(expected);
768 // Do not unpoison `value` if it is the same register as
769 // `expected`, which has just been unpoisoned.
770 if (value != expected) {
771 __ UnpoisonHeapReference(value);
772 }
773 }
774
775 __ Bind(&done);
776 __ Bc(GetExitLabel());
777 }
778
779 private:
780 // The location (register) of the marked object reference.
781 const Location ref_;
782 // The register containing the object holding the marked object reference field.
783 const GpuRegister obj_;
784 // The location of the offset of the marked reference field within `obj_`.
785 Location field_offset_;
786
787 const GpuRegister temp1_;
788
789 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
790};
791
792// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the (possibly relocated) reference; `ref` is the reference
  // that was loaded; `obj` holds the object it was loaded from; `offset` and
  // `index` describe where within `obj` the reference was loaded.
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    // Only reference-producing instructions may use this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (a shift of 2, i.e. x4 for 4-byte references) cannot
        // overflow in practice, as the runtime is unable to allocate
        // object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      // Third argument comes from a register (dynamic offset/index).
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // Third argument is the constant offset; load it after resolving
      // the moves so it cannot be clobbered by them.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register distinct from `ref_` and `obj_`
  // that this slow path may clobber freely.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
977
978// Slow path generating a read barrier for a GC root.
979class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
980 public:
981 ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
982 : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
983 DCHECK(kEmitCompilerReadBarrier);
984 }
985
986 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
987 LocationSummary* locations = instruction_->GetLocations();
988 Primitive::Type type = Primitive::kPrimNot;
989 GpuRegister reg_out = out_.AsRegister<GpuRegister>();
990 DCHECK(locations->CanCall());
991 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
992 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
993 << "Unexpected instruction in read barrier for GC root slow path: "
994 << instruction_->DebugName();
995
996 __ Bind(GetEntryLabel());
997 SaveLiveRegisters(codegen, locations);
998
999 InvokeRuntimeCallingConvention calling_convention;
1000 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
1001 mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
1002 root_,
1003 Primitive::kPrimNot);
1004 mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
1005 instruction_,
1006 instruction_->GetDexPc(),
1007 this);
1008 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1009 mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1010
1011 RestoreLiveRegisters(codegen, locations);
1012 __ Bc(GetExitLabel());
1013 }
1014
1015 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }
1016
1017 private:
1018 const Location out_;
1019 const Location root_;
1020
1021 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
1022};
1023
// Constructs the MIPS64 code generator. Registers the callee-save masks with
// the base CodeGenerator and arena-allocates the literal and patch tables
// used for PC-relative method/type/string references and JIT roots.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      // Deduplication maps for 32-/64-bit literals.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // PC-relative and .bss-entry patch info, recorded during code
      // generation and emitted in EmitLinkerPatches.
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // JIT roots for strings and classes referenced by JIT-compiled code.
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1061
1062#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001063// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1064#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001065#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -07001066
1067void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001068 // Ensure that we fix up branches.
1069 __ FinalizeCode();
1070
1071 // Adjust native pc offsets in stack maps.
1072 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -08001073 uint32_t old_position =
1074 stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips64);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001075 uint32_t new_position = __ GetAdjustedPosition(old_position);
1076 DCHECK_GE(new_position, old_position);
1077 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
1078 }
1079
1080 // Adjust pc offsets for the disassembly information.
1081 if (disasm_info_ != nullptr) {
1082 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1083 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1084 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1085 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1086 it.second.start = __ GetAdjustedPosition(it.second.start);
1087 it.second.end = __ GetAdjustedPosition(it.second.end);
1088 }
1089 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1090 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1091 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1092 }
1093 }
1094
Alexey Frunze4dda3372015-06-01 18:31:49 -07001095 CodeGenerator::Finalize(allocator);
1096}
1097
// Returns the MIPS64 assembler of the owning code generator; moves emitted
// by this resolver are appended to that assembler's buffer.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1101
1102void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001103 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001104 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1105}
1106
1107void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001108 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001109 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
1110}
1111
1112void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
1113 // Pop reg
1114 __ Ld(GpuRegister(reg), SP, 0);
Lazar Trsicd9672662015-09-03 17:33:01 +02001115 __ DecreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001116}
1117
1118void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
1119 // Push reg
Lazar Trsicd9672662015-09-03 17:33:01 +02001120 __ IncreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001121 __ Sd(GpuRegister(reg), SP, 0);
1122}
1123
1124void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
1125 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
1126 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
1127 // Allocate a scratch register other than TMP, if available.
1128 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1129 // automatically unspilled when the scratch scope object is destroyed).
1130 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1131 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +02001132 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001133 __ LoadFromOffset(load_type,
1134 GpuRegister(ensure_scratch.GetRegister()),
1135 SP,
1136 index1 + stack_offset);
1137 __ LoadFromOffset(load_type,
1138 TMP,
1139 SP,
1140 index2 + stack_offset);
1141 __ StoreToOffset(store_type,
1142 GpuRegister(ensure_scratch.GetRegister()),
1143 SP,
1144 index2 + stack_offset);
1145 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
1146}
1147
1148static dwarf::Reg DWARFReg(GpuRegister reg) {
1149 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
1150}
1151
David Srbeckyba702002016-02-01 18:15:29 +00001152static dwarf::Reg DWARFReg(FpuRegister reg) {
1153 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
1154}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001155
// Emits the method prologue: optional stack-overflow probe, frame
// allocation, callee-save register spills (with CFI records), storing the
// current ArtMethod*, and initializing the should-deoptimize flag.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe an address below SP by the reserved amount; loading into ZERO
    // discards the value, so this instruction only matters if it faults.
    // RecordPcInfo records a stack map at the probe (presumably so a fault
    // here can be resolved to this method — confirm with the fault handler).
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips64)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Walk downward from the top of the new frame, storing each allocated
  // callee-save core register and emitting a matching CFI record.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Then the allocated callee-save FPU registers, continuing downward.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1213
// Emits the method epilogue: restores the allocated callee-save registers,
// releases the frame, and returns through RA.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  // Snapshot the CFI state; it is restored after the return so that code
  // emitted later in the method is still described by the full-frame state.
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        // Mirrors the downward walk used when spilling in GenerateFrameEntry.
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return: compact indirect jump through RA with zero offset.
  __ Jic(RA, 0);

  // Re-establish the full-frame CFI state for code emitted after this exit.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1248
1249void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
1250 __ Bind(GetLabelOf(block));
1251}
1252
1253void CodeGeneratorMIPS64::MoveLocation(Location destination,
1254 Location source,
Calin Juravlee460d1d2015-09-29 04:52:17 +01001255 Primitive::Type dst_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001256 if (source.Equals(destination)) {
1257 return;
1258 }
1259
1260 // A valid move can always be inferred from the destination and source
1261 // locations. When moving from and to a register, the argument type can be
1262 // used to generate 32bit instead of 64bit moves.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001263 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001264 DCHECK_EQ(unspecified_type, false);
1265
1266 if (destination.IsRegister() || destination.IsFpuRegister()) {
1267 if (unspecified_type) {
1268 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1269 if (source.IsStackSlot() ||
1270 (src_cst != nullptr && (src_cst->IsIntConstant()
1271 || src_cst->IsFloatConstant()
1272 || src_cst->IsNullConstant()))) {
1273 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001274 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001275 } else {
1276 // If the source is a double stack slot or a 64bit constant, a 64bit
1277 // type is appropriate. Else the source is a register, and since the
1278 // type has not been specified, we chose a 64bit type to force a 64bit
1279 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001280 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001281 }
1282 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001283 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1284 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001285 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1286 // Move to GPR/FPR from stack
1287 LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001288 if (Primitive::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001289 __ LoadFpuFromOffset(load_type,
1290 destination.AsFpuRegister<FpuRegister>(),
1291 SP,
1292 source.GetStackIndex());
1293 } else {
1294 // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
1295 __ LoadFromOffset(load_type,
1296 destination.AsRegister<GpuRegister>(),
1297 SP,
1298 source.GetStackIndex());
1299 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001300 } else if (source.IsSIMDStackSlot()) {
1301 __ LoadFpuFromOffset(kLoadQuadword,
1302 destination.AsFpuRegister<FpuRegister>(),
1303 SP,
1304 source.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001305 } else if (source.IsConstant()) {
1306 // Move to GPR/FPR from constant
1307 GpuRegister gpr = AT;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001308 if (!Primitive::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001309 gpr = destination.AsRegister<GpuRegister>();
1310 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001311 if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001312 int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
1313 if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
1314 gpr = ZERO;
1315 } else {
1316 __ LoadConst32(gpr, value);
1317 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001318 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001319 int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
1320 if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
1321 gpr = ZERO;
1322 } else {
1323 __ LoadConst64(gpr, value);
1324 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001325 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001326 if (dst_type == Primitive::kPrimFloat) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001327 __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001328 } else if (dst_type == Primitive::kPrimDouble) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001329 __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
1330 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001331 } else if (source.IsRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001332 if (destination.IsRegister()) {
1333 // Move to GPR from GPR
1334 __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
1335 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001336 DCHECK(destination.IsFpuRegister());
1337 if (Primitive::Is64BitType(dst_type)) {
1338 __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1339 } else {
1340 __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1341 }
1342 }
1343 } else if (source.IsFpuRegister()) {
1344 if (destination.IsFpuRegister()) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001345 if (GetGraph()->HasSIMD()) {
1346 __ MoveV(VectorRegisterFrom(destination),
1347 VectorRegisterFrom(source));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001348 } else {
Lena Djokicca8c2952017-05-29 11:31:46 +02001349 // Move to FPR from FPR
1350 if (dst_type == Primitive::kPrimFloat) {
1351 __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1352 } else {
1353 DCHECK_EQ(dst_type, Primitive::kPrimDouble);
1354 __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1355 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001356 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001357 } else {
1358 DCHECK(destination.IsRegister());
1359 if (Primitive::Is64BitType(dst_type)) {
1360 __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1361 } else {
1362 __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1363 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001364 }
1365 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001366 } else if (destination.IsSIMDStackSlot()) {
1367 if (source.IsFpuRegister()) {
1368 __ StoreFpuToOffset(kStoreQuadword,
1369 source.AsFpuRegister<FpuRegister>(),
1370 SP,
1371 destination.GetStackIndex());
1372 } else {
1373 DCHECK(source.IsSIMDStackSlot());
1374 __ LoadFpuFromOffset(kLoadQuadword,
1375 FTMP,
1376 SP,
1377 source.GetStackIndex());
1378 __ StoreFpuToOffset(kStoreQuadword,
1379 FTMP,
1380 SP,
1381 destination.GetStackIndex());
1382 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001383 } else { // The destination is not a register. It must be a stack slot.
1384 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1385 if (source.IsRegister() || source.IsFpuRegister()) {
1386 if (unspecified_type) {
1387 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001388 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001389 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001390 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001391 }
1392 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001393 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1394 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001395 // Move to stack from GPR/FPR
1396 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
1397 if (source.IsRegister()) {
1398 __ StoreToOffset(store_type,
1399 source.AsRegister<GpuRegister>(),
1400 SP,
1401 destination.GetStackIndex());
1402 } else {
1403 __ StoreFpuToOffset(store_type,
1404 source.AsFpuRegister<FpuRegister>(),
1405 SP,
1406 destination.GetStackIndex());
1407 }
1408 } else if (source.IsConstant()) {
1409 // Move to stack from constant
1410 HConstant* src_cst = source.GetConstant();
1411 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001412 GpuRegister gpr = ZERO;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001413 if (destination.IsStackSlot()) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001414 int32_t value = GetInt32ValueOf(src_cst->AsConstant());
1415 if (value != 0) {
1416 gpr = TMP;
1417 __ LoadConst32(gpr, value);
1418 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001419 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001420 DCHECK(destination.IsDoubleStackSlot());
1421 int64_t value = GetInt64ValueOf(src_cst->AsConstant());
1422 if (value != 0) {
1423 gpr = TMP;
1424 __ LoadConst64(gpr, value);
1425 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001426 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001427 __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001428 } else {
1429 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
1430 DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
1431 // Move to stack from stack
1432 if (destination.IsStackSlot()) {
1433 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1434 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1435 } else {
1436 __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
1437 __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
1438 }
1439 }
1440 }
1441}
1442
// Swaps the contents of `loc1` and `loc2`, each of which may be a GPR, an
// FPR, or a (single/double) stack slot. `type` selects float vs. double moves
// for FPR<->FPR swaps. Constants cannot be swapped. TMP and FTMP are used as
// scratch registers, so neither location may be one of them.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs through TMP.
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs through FTMP, using single- or double-precision moves
    // according to `type`.
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot: load the slot into TMP, store the register
    // into the slot, then move TMP into the register.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      // Transfer TMP into the FPR: 32-bit mtc1 for a single slot, 64-bit
      // dmtc1 for a double slot.
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack slot <-> stack slot is delegated to the parallel move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1508
Calin Juravle175dc732015-08-25 15:42:32 +01001509void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1510 DCHECK(location.IsRegister());
1511 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1512}
1513
Calin Juravlee460d1d2015-09-29 04:52:17 +01001514void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1515 if (location.IsRegister()) {
1516 locations->AddTemp(location);
1517 } else {
1518 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1519 }
1520}
1521
// Marks the GC card table entry covering `object` after a reference store.
// When `value_can_be_null` is true, the marking is skipped entirely if the
// stored `value` is null. Clobbers AT and TMP.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    // Storing null does not require dirtying a card.
    __ Beqzc(value, &done);
  }
  // Load the card table base from the thread register (TR).
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // Card address = base + (object >> kCardShift).
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Store the low byte of the table base register as the dirty marker.
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1542
// Converts the recorded PC-relative patch infos in `infos` into LinkerPatch
// entries created by `Factory` and appends them to `linker_patches`. Each
// entry pairs the offset of the instruction being patched (`literal_offset`)
// with the location of its "high" anchor (`pc_rel_offset`), relative to which
// the linker computes the final offset.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // A "low" patch anchors on its linked "high" patch; a "high" patch
    // anchors on itself.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1557
// Collects every recorded linker patch into `linker_patches`. Boot-image
// compiles emit Relative{Method,Type,String} patches for the direct
// references; other compiles use the class-table / intern-table patch kinds
// instead (and must have no direct method patches). The .bss entry patches
// are emitted unconditionally.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
                                                                  linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    DCHECK(pc_relative_method_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::TypeClassTablePatch>(pc_relative_type_patches_,
                                                                  linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
                                                                     linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
                                                                linker_patches);
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
                                                                linker_patches);
  // `size` pre-computed above must match exactly what was emitted.
  DCHECK_EQ(size, linker_patches->size());
}
1590
Vladimir Marko65979462017-05-19 17:25:12 +01001591CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001592 MethodReference target_method,
1593 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001594 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001595 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001596 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001597 &pc_relative_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001598}
1599
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001600CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001601 MethodReference target_method,
1602 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001603 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001604 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001605 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001606 &method_bss_entry_patches_);
1607}
1608
Alexey Frunzef63f5692016-12-13 17:43:11 -08001609CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001610 const DexFile& dex_file,
1611 dex::TypeIndex type_index,
1612 const PcRelativePatchInfo* info_high) {
1613 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001614}
1615
Vladimir Marko1998cd02017-01-13 13:02:58 +00001616CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001617 const DexFile& dex_file,
1618 dex::TypeIndex type_index,
1619 const PcRelativePatchInfo* info_high) {
1620 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001621}
1622
Vladimir Marko65979462017-05-19 17:25:12 +01001623CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001624 const DexFile& dex_file,
1625 dex::StringIndex string_index,
1626 const PcRelativePatchInfo* info_high) {
1627 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001628}
1629
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001630CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
1631 const DexFile& dex_file,
1632 dex::StringIndex string_index,
1633 const PcRelativePatchInfo* info_high) {
1634 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
1635}
1636
Alexey Frunze19f6c692016-11-30 19:19:55 -08001637CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001638 const DexFile& dex_file,
1639 uint32_t offset_or_index,
1640 const PcRelativePatchInfo* info_high,
1641 ArenaDeque<PcRelativePatchInfo>* patches) {
1642 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001643 return &patches->back();
1644}
1645
Alexey Frunzef63f5692016-12-13 17:43:11 -08001646Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1647 return map->GetOrCreate(
1648 value,
1649 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1650}
1651
Alexey Frunze19f6c692016-11-30 19:19:55 -08001652Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1653 return uint64_literals_.GetOrCreate(
1654 value,
1655 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1656}
1657
Alexey Frunzef63f5692016-12-13 17:43:11 -08001658Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001659 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001660}
1661
// Emits the "high" half of a PC-relative address computation: binds the high
// patch's label and emits AUIPC with a placeholder immediate that the linker
// later replaces. If `info_low` is provided, its label is bound immediately
// after, marking the very next instruction as the one carrying the low half.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  // Only a "high" patch (one with no high link of its own) may be passed here.
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1676
Alexey Frunze627c1a02017-01-30 19:28:14 -08001677Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
1678 dex::StringIndex string_index,
1679 Handle<mirror::String> handle) {
1680 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
1681 reinterpret_cast64<uint64_t>(handle.GetReference()));
1682 return jit_string_patches_.GetOrCreate(
1683 StringReference(&dex_file, string_index),
1684 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1685}
1686
1687Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
1688 dex::TypeIndex type_index,
1689 Handle<mirror::Class> handle) {
1690 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
1691 reinterpret_cast64<uint64_t>(handle.GetReference()));
1692 return jit_class_patches_.GetOrCreate(
1693 TypeReference(&dex_file, type_index),
1694 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1695}
1696
1697void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1698 const uint8_t* roots_data,
1699 const Literal* literal,
1700 uint64_t index_in_table) const {
1701 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1702 uintptr_t address =
1703 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1704 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1705}
1706
1707void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1708 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001709 const StringReference& string_reference = entry.first;
1710 Literal* table_entry_literal = entry.second;
1711 const auto it = jit_string_roots_.find(string_reference);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001712 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001713 uint64_t index_in_table = it->second;
1714 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001715 }
1716 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001717 const TypeReference& type_reference = entry.first;
1718 Literal* table_entry_literal = entry.second;
1719 const auto it = jit_class_roots_.find(type_reference);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001720 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001721 uint64_t index_in_table = it->second;
1722 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001723 }
1724}
1725
// Marks registers that the register allocator must never hand out: fixed-role
// MIPS registers, codegen scratch registers, the suspend/thread pair, and the
// call register T9.
void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1758
Alexey Frunze4dda3372015-06-01 18:31:49 -07001759size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1760 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001761 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001762}
1763
1764size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1765 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001766 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001767}
1768
1769size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001770 __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
1771 FpuRegister(reg_id),
1772 SP,
1773 stack_index);
1774 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001775}
1776
1777size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001778 __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
1779 FpuRegister(reg_id),
1780 SP,
1781 stack_index);
1782 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001783}
1784
1785void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001786 stream << GpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001787}
1788
1789void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001790 stream << FpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001791}
1792
// Emits a call to the quick runtime entrypoint `entrypoint`. A stack map is
// recorded at `dex_pc` only for entrypoints for which
// EntrypointRequiresStackMap() returns true.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1803
// Emits a runtime call through a raw thread-local entrypoint offset without
// recording a stack map; debug builds validate that none is required.
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1810
// Loads the entrypoint address from the thread register (TR) at
// `entry_point_offset` into T9 and calls through it; the NOP fills the
// JALR delay slot.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1816
// Branches to `slow_path` unless the class in `class_reg` has reached the
// initialized status (status >= kStatusInitialized). Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1826
// Emits a full memory barrier regardless of the requested `kind`.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1830
// Polls the thread-flags halfword and diverts to a suspend-check slow path
// when any flag is set. With no `successor` the check falls through after the
// slow path returns; with a `successor` (loop back edge), the zero-flags case
// branches straight to the successor and the slow path is entered otherwise.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1850
// Wires the instruction visitor to the assembler and code generator it emits
// through.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1856
// Builds the location summary for Add/Sub/And/Or/Xor. Integer operations may
// take an immediate right-hand side when it fits the 16-bit field of the
// corresponding MIPS instruction; floating-point operations always need FPRs.
void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type type = instruction->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          // andi/ori/xori zero-extend their 16-bit immediate.
          can_use_imm = IsUint<16>(imm);
        } else if (instruction->IsAdd()) {
          // addiu/daddiu sign-extend their 16-bit immediate.
          can_use_imm = IsInt<16>(imm);
        } else {
          DCHECK(instruction->IsSub());
          // Subtraction is emitted as addiu/daddiu with the negated immediate.
          can_use_imm = IsInt<16>(-imm);
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}
1897
// Emits code for Add/Sub/And/Or/Xor according to the location summary built
// by LocationsBuilderMIPS64::HandleBinaryOp(): register-register or
// register-immediate forms for integers, FPR forms for float/double.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      // The right-hand side is either an immediate (validated by the
      // locations builder to fit the instruction) or a register.
      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        // 32-bit forms for int, 64-bit (d-prefixed) forms for long.
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        // Immediate subtraction is emitted as addition of the negated value.
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == Primitive::kPrimFloat)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
1985
// Sets up register/constant locations for shift and rotate operations
// (Shl/Shr/UShr/Ror). The shift amount may stay a constant; it is folded
// into an immediate-form shift instruction by the code generator.
void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      // No overlap: the result may safely share a register with an input.
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}
2003
// Emits MIPS64 code for shifts and rotates (Shl/Shr/UShr/Ror).
// With a constant shift amount, the immediate-form instruction is used;
// 64-bit shifts by 32..63 need the "*32" instruction variants, which shift
// by (amount - 32). With a register shift amount, the "*v" (variable)
// instruction forms are used; the hardware masks the amount itself.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Per the Java language spec, mask the distance to 0..31 (int)
        // or 0..63 (long).
        uint32_t shift_value = rhs_imm &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is a plain register move (elided if dst == lhs).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            __ Dsll / __ Dsra / ... take a 5-bit immediate, so distances of
            // 32..63 must use the dedicated "*32" encodings below.
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // The "*32" variants shift by (shift_value + 32).
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        // Variable shift amount: use the register ("v") instruction forms.
        if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2096
// HAdd locations are shared with the other binary ops via HandleBinaryOp.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2100
// HAdd code generation is shared with the other binary ops via HandleBinaryOp.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2104
// HAnd locations are shared with the other binary ops via HandleBinaryOp.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2108
// HAnd code generation is shared with the other binary ops via HandleBinaryOp.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2112
// Sets up locations for an array element load. Object loads with read
// barriers may call a slow path and may need an extra temporary register.
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier
                                                       ? LocationSummary::kCallOnSlowPath
                                                       : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // With Baker read-barrier thunks enabled for the relevant access shape
    // (field-style for constant indices, array-style otherwise), no temp
    // is required.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2149
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002150static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2151 auto null_checker = [codegen, instruction]() {
2152 codegen->MaybeRecordImplicitNullCheck(instruction);
2153 };
2154 return null_checker;
2155}
2156
// Emits code for an array element load. A constant index is folded into the
// load's offset; a register index forms the element address in TMP (via
// Daddu for byte-sized elements, Dlsa with the element-size scale otherwise).
// String.charAt with string compression emits both a compressed (byte) and
// an uncompressed (halfword) load path, chosen at run time on the string's
// compression bit. Object loads may go through (Baker) read barriers.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  Primitive::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case Primitive::kPrimBoolean: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimByte: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimChar: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        // TMP = bit 0 of the count field, i.e. the compression flag.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          // Branch on the compression flag: byte load when compressed,
          // halfword load when uncompressed.
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case Primitive::kPrimInt: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // The temp was only reserved by the locations builder when the
        // corresponding Baker thunks are disabled.
        bool temp_needed = index.IsConstant()
            ? !kBakerReadBarrierThunksEnableForFields
            : !kBakerReadBarrierThunksEnableForArrays;
        Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          offset,
                                                          temp,
                                                          /* needs_null_check */ false);
        } else {
          codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          index,
                                                          temp,
                                                          /* needs_null_check */ false);
        }
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2380
// Array length needs the array reference in a register and produces a
// register result; no overlap required.
void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2386
// Loads the array length field; the load doubles as an implicit null check.
// For String.length with compression enabled, the low bit of the count field
// is the compression flag and is shifted out.
void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
2399
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002400Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2401 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2402 ? Location::ConstantLocation(instruction->AsConstant())
2403 : Location::RequiresRegister();
2404}
2405
2406Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2407 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2408 // We can store a non-zero float or double constant without first loading it into the FPU,
2409 // but we should only prefer this if the constant has a single use.
2410 if (instruction->IsConstant() &&
2411 (instruction->AsConstant()->IsZeroBitPattern() ||
2412 instruction->GetUses().HasExactlyOneElement())) {
2413 return Location::ConstantLocation(instruction->AsConstant());
2414 // Otherwise fall through and require an FPU register for the constant.
2415 }
2416 return Location::RequiresFpuRegister();
2417}
2418
// Sets up locations for an array element store. A store that may need a
// run-time type check calls a slow path; reference stores that need a write
// barrier get a temporary register. Zero constants (and single-use FP
// constants) may be stored without occupying a register.
void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
    locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
  }
  if (needs_write_barrier) {
    // Temporary register for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
  }
}
2444
2445void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
2446 LocationSummary* locations = instruction->GetLocations();
2447 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2448 Location index = locations->InAt(1);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002449 Location value_location = locations->InAt(2);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002450 Primitive::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002451 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002452 bool needs_write_barrier =
2453 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002454 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002455 GpuRegister base_reg = index.IsConstant() ? obj : TMP;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002456
2457 switch (value_type) {
2458 case Primitive::kPrimBoolean:
2459 case Primitive::kPrimByte: {
2460 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002461 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002462 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002463 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002464 __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
2465 }
2466 if (value_location.IsConstant()) {
2467 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2468 __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
2469 } else {
2470 GpuRegister value = value_location.AsRegister<GpuRegister>();
2471 __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002472 }
2473 break;
2474 }
2475
2476 case Primitive::kPrimShort:
2477 case Primitive::kPrimChar: {
2478 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002479 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002480 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002481 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002482 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002483 }
2484 if (value_location.IsConstant()) {
2485 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2486 __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
2487 } else {
2488 GpuRegister value = value_location.AsRegister<GpuRegister>();
2489 __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002490 }
2491 break;
2492 }
2493
Alexey Frunze15958152017-02-09 19:08:30 -08002494 case Primitive::kPrimInt: {
2495 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2496 if (index.IsConstant()) {
2497 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
2498 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002499 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002500 }
2501 if (value_location.IsConstant()) {
2502 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2503 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2504 } else {
2505 GpuRegister value = value_location.AsRegister<GpuRegister>();
2506 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2507 }
2508 break;
2509 }
2510
Alexey Frunze4dda3372015-06-01 18:31:49 -07002511 case Primitive::kPrimNot: {
Alexey Frunze15958152017-02-09 19:08:30 -08002512 if (value_location.IsConstant()) {
2513 // Just setting null.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002514 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002515 if (index.IsConstant()) {
Alexey Frunzec061de12017-02-14 13:27:23 -08002516 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002517 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002518 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunzec061de12017-02-14 13:27:23 -08002519 }
Alexey Frunze15958152017-02-09 19:08:30 -08002520 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2521 DCHECK_EQ(value, 0);
2522 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2523 DCHECK(!needs_write_barrier);
2524 DCHECK(!may_need_runtime_call_for_type_check);
2525 break;
2526 }
2527
2528 DCHECK(needs_write_barrier);
2529 GpuRegister value = value_location.AsRegister<GpuRegister>();
2530 GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
2531 GpuRegister temp2 = TMP; // Doesn't need to survive slow path.
2532 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2533 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2534 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2535 Mips64Label done;
2536 SlowPathCodeMIPS64* slow_path = nullptr;
2537
2538 if (may_need_runtime_call_for_type_check) {
2539 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS64(instruction);
2540 codegen_->AddSlowPath(slow_path);
2541 if (instruction->GetValueCanBeNull()) {
2542 Mips64Label non_zero;
2543 __ Bnezc(value, &non_zero);
2544 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2545 if (index.IsConstant()) {
2546 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002547 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002548 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002549 }
Alexey Frunze15958152017-02-09 19:08:30 -08002550 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2551 __ Bc(&done);
2552 __ Bind(&non_zero);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002553 }
Alexey Frunze15958152017-02-09 19:08:30 -08002554
2555 // Note that when read barriers are enabled, the type checks
2556 // are performed without read barriers. This is fine, even in
2557 // the case where a class object is in the from-space after
2558 // the flip, as a comparison involving such a type would not
2559 // produce a false positive; it may of course produce a false
2560 // negative, in which case we would take the ArraySet slow
2561 // path.
2562
2563 // /* HeapReference<Class> */ temp1 = obj->klass_
2564 __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
2565 __ MaybeUnpoisonHeapReference(temp1);
2566
2567 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2568 __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
2569 // /* HeapReference<Class> */ temp2 = value->klass_
2570 __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
2571 // If heap poisoning is enabled, no need to unpoison `temp1`
2572 // nor `temp2`, as we are comparing two poisoned references.
2573
2574 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2575 Mips64Label do_put;
2576 __ Beqc(temp1, temp2, &do_put);
2577 // If heap poisoning is enabled, the `temp1` reference has
2578 // not been unpoisoned yet; unpoison it now.
2579 __ MaybeUnpoisonHeapReference(temp1);
2580
2581 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2582 __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
2583 // If heap poisoning is enabled, no need to unpoison
2584 // `temp1`, as we are comparing against null below.
2585 __ Bnezc(temp1, slow_path->GetEntryLabel());
2586 __ Bind(&do_put);
2587 } else {
2588 __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
2589 }
2590 }
2591
2592 GpuRegister source = value;
2593 if (kPoisonHeapReferences) {
2594 // Note that in the case where `value` is a null reference,
2595 // we do not enter this block, as a null reference does not
2596 // need poisoning.
2597 __ Move(temp1, value);
2598 __ PoisonHeapReference(temp1);
2599 source = temp1;
2600 }
2601
2602 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2603 if (index.IsConstant()) {
2604 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002605 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002606 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002607 }
2608 __ StoreToOffset(kStoreWord, source, base_reg, data_offset);
2609
2610 if (!may_need_runtime_call_for_type_check) {
2611 codegen_->MaybeRecordImplicitNullCheck(instruction);
2612 }
2613
2614 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
2615
2616 if (done.IsLinked()) {
2617 __ Bind(&done);
2618 }
2619
2620 if (slow_path != nullptr) {
2621 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002622 }
2623 break;
2624 }
2625
2626 case Primitive::kPrimLong: {
2627 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002628 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002629 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002630 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002631 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002632 }
2633 if (value_location.IsConstant()) {
2634 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2635 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2636 } else {
2637 GpuRegister value = value_location.AsRegister<GpuRegister>();
2638 __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002639 }
2640 break;
2641 }
2642
2643 case Primitive::kPrimFloat: {
2644 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002645 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002646 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002647 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002648 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002649 }
2650 if (value_location.IsConstant()) {
2651 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2652 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2653 } else {
2654 FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
2655 __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002656 }
2657 break;
2658 }
2659
2660 case Primitive::kPrimDouble: {
2661 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002662 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002663 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002664 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002665 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002666 }
2667 if (value_location.IsConstant()) {
2668 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2669 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2670 } else {
2671 FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
2672 __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002673 }
2674 break;
2675 }
2676
2677 case Primitive::kPrimVoid:
2678 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2679 UNREACHABLE();
2680 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002681}
2682
2683void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002684 RegisterSet caller_saves = RegisterSet::Empty();
2685 InvokeRuntimeCallingConvention calling_convention;
2686 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2687 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2688 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002689 locations->SetInAt(0, Location::RequiresRegister());
2690 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002691}
2692
2693void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
2694 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002695 BoundsCheckSlowPathMIPS64* slow_path =
2696 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002697 codegen_->AddSlowPath(slow_path);
2698
2699 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
2700 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
2701
2702 // length is limited by the maximum positive signed 32-bit integer.
2703 // Unsigned comparison of length and index checks for index < 0
2704 // and for length <= index simultaneously.
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002705 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002706}
2707
Alexey Frunze15958152017-02-09 19:08:30 -08002708// Temp is used for read barrier.
2709static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2710 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002711 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002712 (kUseBakerReadBarrier ||
2713 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2714 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2715 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2716 return 1;
2717 }
2718 return 0;
2719}
2720
2721// Extra temp is used for read barrier.
2722static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2723 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2724}
2725
Alexey Frunze4dda3372015-06-01 18:31:49 -07002726void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002727 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2728 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
2729
2730 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
2731 switch (type_check_kind) {
2732 case TypeCheckKind::kExactCheck:
2733 case TypeCheckKind::kAbstractClassCheck:
2734 case TypeCheckKind::kClassHierarchyCheck:
2735 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08002736 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002737 ? LocationSummary::kCallOnSlowPath
2738 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
2739 break;
2740 case TypeCheckKind::kArrayCheck:
2741 case TypeCheckKind::kUnresolvedCheck:
2742 case TypeCheckKind::kInterfaceCheck:
2743 call_kind = LocationSummary::kCallOnSlowPath;
2744 break;
2745 }
2746
2747 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002748 locations->SetInAt(0, Location::RequiresRegister());
2749 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08002750 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002751}
2752
// Emits the inline fast paths for a check-cast, falling back to
// TypeCheckSlowPathMIPS64 when the inline test fails (or, for unresolved
// checks, unconditionally). All class loads below deliberately skip read
// barriers; see the comment on `is_type_check_slow_path_fatal`.
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // The second temp exists only when read barriers require it.
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null: a null reference passes
  // any check-cast.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      // TMP holds the remaining number of iftable slots to scan.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      Mips64Label loop;
      __ Bind(&loop);
      // Scanned the whole iftable without a match: go throw.
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      // Each iftable entry occupies two references, hence the stride of 2.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
2939
2940void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
2941 LocationSummary* locations =
2942 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2943 locations->SetInAt(0, Location::RequiresRegister());
2944 if (check->HasUses()) {
2945 locations->SetOut(Location::SameAsFirstInput());
2946 }
2947}
2948
2949void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
2950 // We assume the class is not null.
2951 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
2952 check->GetLoadClass(),
2953 check,
2954 check->GetDexPc(),
2955 true);
2956 codegen_->AddSlowPath(slow_path);
2957 GenerateClassInitializationCheck(slow_path,
2958 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
2959}
2960
2961void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
2962 Primitive::Type in_type = compare->InputAt(0)->GetType();
2963
Alexey Frunze299a9392015-12-08 16:08:02 -08002964 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002965
2966 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002967 case Primitive::kPrimBoolean:
2968 case Primitive::kPrimByte:
2969 case Primitive::kPrimShort:
2970 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002971 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002972 case Primitive::kPrimLong:
2973 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002974 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002975 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2976 break;
2977
2978 case Primitive::kPrimFloat:
Alexey Frunze299a9392015-12-08 16:08:02 -08002979 case Primitive::kPrimDouble:
2980 locations->SetInAt(0, Location::RequiresFpuRegister());
2981 locations->SetInAt(1, Location::RequiresFpuRegister());
2982 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002983 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002984
2985 default:
2986 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2987 }
2988}
2989
// Materializes the three-way comparison result of HCompare into a core
// register. For floating point, the gt/lt bias decides the result when the
// operands are unordered (NaN).
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      // A zero constant can use the ZERO register directly; any other
      // constant is first materialized into AT.
      GpuRegister rhs = ZERO;
      if (use_imm) {
        if (in_type == Primitive::kPrimLong) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), i.e. the sign of (lhs - rhs)
      // without risking overflow.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal (ordered) operands yield 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // Gt bias: unordered (NaN) falls through to the +1 result.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // Lt bias: unordered (NaN) falls through to the -1 result.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Same scheme as the float case, using the double-precision compares.
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3080
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003081void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003082 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08003083 switch (instruction->InputAt(0)->GetType()) {
3084 default:
3085 case Primitive::kPrimLong:
3086 locations->SetInAt(0, Location::RequiresRegister());
3087 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3088 break;
3089
3090 case Primitive::kPrimFloat:
3091 case Primitive::kPrimDouble:
3092 locations->SetInAt(0, Location::RequiresFpuRegister());
3093 locations->SetInAt(1, Location::RequiresFpuRegister());
3094 break;
3095 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003096 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003097 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3098 }
3099}
3100
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003101void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003102 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003103 return;
3104 }
3105
Alexey Frunze299a9392015-12-08 16:08:02 -08003106 Primitive::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003107 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003108 switch (type) {
3109 default:
3110 // Integer case.
3111 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
3112 return;
3113 case Primitive::kPrimLong:
3114 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
3115 return;
Alexey Frunze299a9392015-12-08 16:08:02 -08003116 case Primitive::kPrimFloat:
3117 case Primitive::kPrimDouble:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003118 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3119 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003120 }
3121}
3122
Alexey Frunzec857c742015-09-23 15:12:39 -07003123void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3124 DCHECK(instruction->IsDiv() || instruction->IsRem());
3125 Primitive::Type type = instruction->GetResultType();
3126
3127 LocationSummary* locations = instruction->GetLocations();
3128 Location second = locations->InAt(1);
3129 DCHECK(second.IsConstant());
3130
3131 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3132 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3133 int64_t imm = Int64FromConstant(second.GetConstant());
3134 DCHECK(imm == 1 || imm == -1);
3135
3136 if (instruction->IsRem()) {
3137 __ Move(out, ZERO);
3138 } else {
3139 if (imm == -1) {
3140 if (type == Primitive::kPrimInt) {
3141 __ Subu(out, ZERO, dividend);
3142 } else {
3143 DCHECK_EQ(type, Primitive::kPrimLong);
3144 __ Dsubu(out, ZERO, dividend);
3145 }
3146 } else if (out != dividend) {
3147 __ Move(out, dividend);
3148 }
3149 }
3150}
3151
// Strength-reduced code for HDiv/HRem where |divisor| is a power of two:
// division becomes a sign-corrected arithmetic shift, remainder becomes a
// sign-corrected mask. The sign correction (adding |divisor|-1 to negative
// dividends before shifting) makes the shift round toward zero like Java
// division requires.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin keeps INT64_MIN as-is; its unsigned value is still the
  // correct power of two.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        // TMP = 1 iff dividend is negative (sign bit).
        __ Srl(TMP, dividend, 31);
      } else {
        // TMP = abs_imm - 1 if dividend is negative, else 0.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      // The *32 shift variants implicitly add 32 to the shift amount.
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // Bias the dividend, mask off the low bits, then remove the bias:
        // this yields a remainder with the sign of the dividend.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          // Mask fits in Andi's 16-bit immediate.
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Emulate the mask with a shift-left/shift-right pair.
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3255
// Emits HDiv/HRem by an arbitrary non-power-of-two constant using the
// magic-number multiplication technique: the quotient is obtained from the
// high half of (dividend * magic), adjusted and shifted as computed by
// CalculateMagicAndShiftForDivRem. No divide instruction is emitted.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == Primitive::kPrimLong),
                                  &magic,
                                  &shift);

  if (type == Primitive::kPrimInt) {
    // TMP = high 32 bits of (dividend * magic).
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Compensate when the sign of the magic constant disagrees with the
    // sign of the divisor (the magic multiply is off by +/-dividend then).
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Add 1 when the intermediate quotient is negative (the arithmetic
      // shift rounded toward negative infinity).
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // Remainder: out = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      // Narrowing to 32 bits is intentional for the int case.
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // Same algorithm with the 64-bit multiply/shift variants.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3329
3330void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3331 DCHECK(instruction->IsDiv() || instruction->IsRem());
3332 Primitive::Type type = instruction->GetResultType();
3333 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
3334
3335 LocationSummary* locations = instruction->GetLocations();
3336 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3337 Location second = locations->InAt(1);
3338
3339 if (second.IsConstant()) {
3340 int64_t imm = Int64FromConstant(second.GetConstant());
3341 if (imm == 0) {
3342 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3343 } else if (imm == 1 || imm == -1) {
3344 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003345 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003346 DivRemByPowerOfTwo(instruction);
3347 } else {
3348 DCHECK(imm <= -2 || imm >= 2);
3349 GenerateDivRemWithAnyConstant(instruction);
3350 }
3351 } else {
3352 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3353 GpuRegister divisor = second.AsRegister<GpuRegister>();
3354 if (instruction->IsDiv()) {
3355 if (type == Primitive::kPrimInt)
3356 __ DivR6(out, dividend, divisor);
3357 else
3358 __ Ddiv(out, dividend, divisor);
3359 } else {
3360 if (type == Primitive::kPrimInt)
3361 __ ModR6(out, dividend, divisor);
3362 else
3363 __ Dmod(out, dividend, divisor);
3364 }
3365 }
3366}
3367
Alexey Frunze4dda3372015-06-01 18:31:49 -07003368void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3369 LocationSummary* locations =
3370 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3371 switch (div->GetResultType()) {
3372 case Primitive::kPrimInt:
3373 case Primitive::kPrimLong:
3374 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003375 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003376 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3377 break;
3378
3379 case Primitive::kPrimFloat:
3380 case Primitive::kPrimDouble:
3381 locations->SetInAt(0, Location::RequiresFpuRegister());
3382 locations->SetInAt(1, Location::RequiresFpuRegister());
3383 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3384 break;
3385
3386 default:
3387 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3388 }
3389}
3390
3391void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
3392 Primitive::Type type = instruction->GetType();
3393 LocationSummary* locations = instruction->GetLocations();
3394
3395 switch (type) {
3396 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07003397 case Primitive::kPrimLong:
3398 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003399 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003400 case Primitive::kPrimFloat:
3401 case Primitive::kPrimDouble: {
3402 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3403 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3404 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
3405 if (type == Primitive::kPrimFloat)
3406 __ DivS(dst, lhs, rhs);
3407 else
3408 __ DivD(dst, lhs, rhs);
3409 break;
3410 }
3411 default:
3412 LOG(FATAL) << "Unexpected div type " << type;
3413 }
3414}
3415
3416void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003417 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003418 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003419}
3420
3421void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3422 SlowPathCodeMIPS64* slow_path =
3423 new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
3424 codegen_->AddSlowPath(slow_path);
3425 Location value = instruction->GetLocations()->InAt(0);
3426
3427 Primitive::Type type = instruction->GetType();
3428
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003429 if (!Primitive::IsIntegralType(type)) {
3430 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003431 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003432 }
3433
3434 if (value.IsConstant()) {
3435 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
3436 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003437 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003438 } else {
3439 // A division by a non-null constant is valid. We don't need to perform
3440 // any check, so simply fall through.
3441 }
3442 } else {
3443 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3444 }
3445}
3446
3447void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3448 LocationSummary* locations =
3449 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3450 locations->SetOut(Location::ConstantLocation(constant));
3451}
3452
// No code is emitted here; the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3456
// The exit block needs no locations.
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3460
// The exit block generates no code.
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
3463
3464void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3465 LocationSummary* locations =
3466 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3467 locations->SetOut(Location::ConstantLocation(constant));
3468}
3469
// No code is emitted here; the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3473
David Brazdilfc6a86a2015-06-26 10:33:45 +00003474void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003475 DCHECK(!successor->IsExitBlock());
3476 HBasicBlock* block = got->GetBlock();
3477 HInstruction* previous = got->GetPrevious();
3478 HLoopInformation* info = block->GetLoopInformation();
3479
3480 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3481 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3482 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3483 return;
3484 }
3485 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3486 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3487 }
3488 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003489 __ Bc(codegen_->GetLabelOf(successor));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003490 }
3491}
3492
// An unconditional jump needs no locations.
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
3496
// Delegates to the shared goto/try-boundary branch emitter.
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
3500
// A try boundary needs no locations.
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3504
3505void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3506 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3507 if (!successor->IsExitBlock()) {
3508 HandleGoto(try_boundary, successor);
3509 }
3510}
3511
// Materializes the boolean value (0 or 1) of an int/long comparison into the
// output GPR. The RHS may be a register or a constant; constants that fit
// the relevant immediate encodings are folded into immediate-form
// instructions, otherwise they are loaded into TMP.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          // Subtract the constant; the result is zero iff lhs == rhs.
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        // XOR the operands; the result is zero iff lhs == rhs.
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3667
// Emits only the comparison part of a materialized int/long condition into
// `dst` and returns true when `dst` ends up holding the *negation* of
// `cond` (e.g. for kCondEQ it leaves lhs ^ rhs, which is non-zero exactly
// when the operands differ). The caller must invert its use of `dst` when
// true is returned; this saves the Xori/Sltiu normalization that
// GenerateIntLongCompare performs.
bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
                                                               bool is64bit,
                                                               LocationSummary* input_locations,
                                                               GpuRegister dst) {
  GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = input_locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        // dst = lhs - rhs: zero iff the operands are equal.
        if (is64bit) {
          __ Daddiu(dst, lhs, -rhs_imm);
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
        }
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
3775
// Emits a fused compare-and-branch to `label` for an int/long condition.
// A zero RHS uses the compact branch-on-zero/sign forms; any other constant
// is first materialized in TMP and a two-register compact branch is used.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    // Materialize a constant RHS in TMP for the two-register branch forms.
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
3862
// Materializes a float/double comparison into `dst` as 0 or 1 using the R6
// CMP.cond.fmt instructions. `gt_bias` selects between the ordered and
// unordered compare variants so that NaN operands yield the result required
// by the condition's bias.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       Primitive::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // CMP.EQ sets FTMP to all ones on true; dst is -1 or 0, so
        // adding 1 maps -1 -> 0 and 0 -> 1, inverting the compare.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // See the single-precision kCondNE case: +1 inverts the -1/0 result.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
3977
// Emits only the CMP.cond.fmt into `dst` (an FPU register that holds all
// ones on true, all zeros on false) and returns true when `dst` holds the
// *negation* of `cond` — only for kCondNE, which materializes the EQ
// compare. The caller must test the flag with the inverse branch in that
// case. `gt_bias` selects ordered vs unordered compare variants for the
// required NaN behavior.
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4068
// Emits a fused float/double compare-and-branch: the CMP.cond.fmt result in
// FTMP is tested with BC1NEZ (branch if true) or BC1EQZ (branch if false,
// used to invert the materialized EQ for kCondNE). `gt_bias` selects
// ordered vs unordered compare variants for the required NaN behavior.
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                Primitive::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4171
// Emits the branch(es) for the boolean condition at `condition_input_index`
// of `instruction`. Either target may be null, meaning that successor is the
// fall-through block. Constant and materialized conditions are tested
// directly; otherwise the comparison is fused into the branch.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    // Pattern (1): branch to false_target on the opposite condition.
    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
4247
4248void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
4249 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004250 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004251 locations->SetInAt(0, Location::RequiresRegister());
4252 }
4253}
4254
4255void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004256 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4257 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004258 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004259 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004260 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004261 nullptr : codegen_->GetLabelOf(false_successor);
4262 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004263}
4264
4265void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
4266 LocationSummary* locations = new (GetGraph()->GetArena())
4267 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01004268 InvokeRuntimeCallingConvention calling_convention;
4269 RegisterSet caller_saves = RegisterSet::Empty();
4270 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4271 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00004272 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004273 locations->SetInAt(0, Location::RequiresRegister());
4274 }
4275}
4276
4277void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08004278 SlowPathCodeMIPS64* slow_path =
4279 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00004280 GenerateTestAndBranch(deoptimize,
4281 /* condition_input_index */ 0,
4282 slow_path->GetEntryLabel(),
4283 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004284}
4285
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  // `materialized` is true when the condition is a boolean value or a condition
  // already materialized into a register; in that case it is consumed as an int
  // and `condition` below may be null (it is then never dereferenced).
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  // Zero-bit-pattern constant inputs can be encoded directly in SELEQZ/SELNEZ.
  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // A constant condition never yields a conditional move (can_move_conditionally
  // stays false); such selects are lowered via branches and regular moves.
  if (!cond->IsConstant()) {
    if (!Primitive::IsFloatingPointType(cond_type)) {
      if (!Primitive::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!Primitive::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  // At most one of the two inputs may be replaced by a constant.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    // Input 0 is the false value, input 1 the true value (see GenConditionalMove).
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}
4432
4433
// Lowers an HSelect into MIPS64 conditional-move instructions (SELEQZ/SELNEZ
// for integer destinations, SELEQZ.fmt/SELNEZ.fmt/SEL.fmt for FP ones). Only
// called when CanMoveConditionally() returned true for `select`.
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  // Obtain the condition in cond_reg (int) or fcond_reg (FP): either read the
  // already materialized value, or materialize the compare now.
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        // All non-long integer (and reference) comparisons.
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case Primitive::kPrimLong:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // Constant inputs are only allowed when they are the zero bit pattern
  // (guaranteed by CanMoveConditionally).
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer/reference destination.
      if (Primitive::IsFloatingPointType(cond_type)) {
        // Transfer the FP condition into a core register for SELEQZ/SELNEZ.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        // Combine both halves: AT holds the selected true value, TMP the
        // selected false value; exactly one of them is non-zero.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4585
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004586void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4587 LocationSummary* locations = new (GetGraph()->GetArena())
4588 LocationSummary(flag, LocationSummary::kNoCall);
4589 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004590}
4591
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004592void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4593 __ LoadFromOffset(kLoadWord,
4594 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4595 SP,
4596 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004597}
4598
David Brazdil74eb1b22015-12-14 11:44:01 +00004599void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
4600 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004601 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004602}
4603
4604void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004605 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4606 GenConditionalMove(select);
4607 } else {
4608 LocationSummary* locations = select->GetLocations();
4609 Mips64Label false_target;
4610 GenerateTestAndBranch(select,
4611 /* condition_input_index */ 2,
4612 /* true_target */ nullptr,
4613 &false_target);
4614 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4615 __ Bind(&false_target);
4616 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004617}
4618
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs or outputs to set; only the (empty) LocationSummary is created.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
4622
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in
  // CodeGenerator::Compile, so no code needs to be emitted here.
}
4626
void CodeGeneratorMIPS64::GenerateNop() {
  // Emit a single no-op instruction.
  __ Nop();
}
4630
Alexey Frunze4dda3372015-06-01 18:31:49 -07004631void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08004632 const FieldInfo& field_info) {
4633 Primitive::Type field_type = field_info.GetFieldType();
4634 bool object_field_get_with_read_barrier =
4635 kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
4636 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
4637 instruction,
4638 object_field_get_with_read_barrier
4639 ? LocationSummary::kCallOnSlowPath
4640 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07004641 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4642 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
4643 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004644 locations->SetInAt(0, Location::RequiresRegister());
4645 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4646 locations->SetOut(Location::RequiresFpuRegister());
4647 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08004648 // The output overlaps in the case of an object field get with
4649 // read barriers enabled: we do not want the move to overwrite the
4650 // object's location, as we need it to emit the read barrier.
4651 locations->SetOut(Location::RequiresRegister(),
4652 object_field_get_with_read_barrier
4653 ? Location::kOutputOverlap
4654 : Location::kNoOutputOverlap);
4655 }
4656 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4657 // We need a temporary register for the read barrier marking slow
4658 // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004659 if (!kBakerReadBarrierThunksEnableForFields) {
4660 locations->AddTemp(Location::RequiresRegister());
4661 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004662 }
4663}
4664
// Emits the load for an instance/static field get: reference fields go
// through the read barrier machinery, volatile fields are followed by a
// LoadAny memory barrier.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/sign-extension behavior from the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimNot:
      // References are 32-bit (compressed) heap pointers, zero-extended.
      load_type = kLoadUnsignedWord;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With field thunks enabled no temp is allocated (see HandleFieldGet
        // in the locations builder).
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4749
4750void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4751 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4752 LocationSummary* locations =
4753 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4754 locations->SetInAt(0, Location::RequiresRegister());
4755 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004756 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004757 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004758 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004759 }
4760}
4761
// Emits the store for an instance/static field set. Volatile stores are
// bracketed by AnyStore/AnyAny barriers; reference stores are poisoned when
// heap poisoning is enabled and mark the GC card when a write barrier is
// required.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width from the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Barrier before a volatile store (kAnyStore).
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!Primitive::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  // Mark the GC card for a stored (non-poisoned) reference.
  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Barrier after a volatile store (kAnyAny).
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4836
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance field gets share the common field-get location logic.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4840
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance field gets share the common field-get code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4844
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance field sets share the common field-set location logic.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4848
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance field sets share the common field-set code generation.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4852
// Loads the reference at address `out + offset` into `out` itself, applying
// the requested read barrier treatment. `maybe_temp` must hold a register
// unless the Baker field thunks make it unnecessary; for the slow-path
// barrier it preserves the original object reference.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
4891
// Loads the reference at address `obj + offset` into `out` (distinct
// registers), applying the requested read barrier treatment. Since `obj`
// stays live, no extra copy is needed for the slow-path barrier; `maybe_temp`
// is only required by the non-thunk Baker barrier.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
4928
// Maps a GPU register to its Baker read barrier mark thunk index:
// V0..T2 -> [0, 13), S2..S7 -> [13, 19), S8 -> 19. Any other register is a
// fatal error.
static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
  static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
  if (reg >= V0 && reg <= T2) {  // 13 consecutive regs.
    return reg - V0;
  } else if (reg >= S2 && reg <= S7) {  // 6 consecutive regs.
    return 13 + (reg - S2);
  } else if (reg == S8) {  // One more.
    return 19;
  }
  LOG(FATAL) << "Unexpected register " << reg;
  UNREACHABLE();
}
4941
4942static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
4943 int num = GetBakerMarkThunkNumber(reg) +
4944 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
4945 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
4946}
4947
4948static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
4949 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
4950 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
4951}
4952
// Loads a GC root (compressed reference) from `obj + offset` into `root`,
// honoring `read_barrier_option`:
//  - with Baker read barriers and thunks: introspection entrypoint + per-register thunk;
//  - with Baker read barriers, no thunks: per-register mark entrypoint + slow path;
//  - with non-Baker read barriers: slow-path-only root read barrier;
//  - without read barrier: plain lwu.
// If `label_low` is non-null it is bound exactly on the instruction that carries
// the low 16 bits of the address, and `offset` must then be the 0x5678
// placeholder (presumably rewritten by a later patching step — TODO confirm
// against the PC-relative load callers).
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    // The placeholder offset is required so the patched low half fits the
    // single load instruction `label_low` is bound to.
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        //     temp = Thread::Current()->pReadBarrierMarkReg00
        //     // AKA &art_quick_read_barrier_mark_introspection.
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     if (temp != nullptr) {
        //        temp = &gc_root_thunk<root_reg>
        //        root = temp(root)
        //     }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          // Materialize the high half of the offset in TMP up front; the bare
          // branch below has a delay slot reserved for the load itself.
          DCHECK(!label_low);
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        //     if (temp != null) {
        //       root = temp(root)
        //     }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        // A non-null entrypoint means the GC is marking; take the slow path.
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5077
// Emits a field load of `*(obj + offset)` into `ref` with a Baker read
// barrier. With the introspection thunks enabled this is a fast path that
// calls a per-holder-register thunk only when the mark entrypoint is non-null;
// otherwise it falls back to the generic
// GenerateReferenceLoadWithBakerReadBarrier (which needs `temp`).
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // If the offset is too large to fit into the lw instruction, we
    //     // use an adjusted base register (TMP) here. This register
    //     // receives bits 16 ... 31 of the offset before the thunk invocation
    //     // and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may or may not have been a null check if the field offset is
    // smaller than the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch (no delay slot); the slot after it is "forbidden",
      // hence the explicit Nop.
      __ Beqzc(T9, &skip_call, /* is_bare */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Generic (non-thunk) Baker path.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5167
// Emits an array-element load of `*(obj + data_offset + index * 4)` into
// `ref` with a Baker read barrier. With the introspection thunks enabled the
// element address is pre-computed in TMP and a per-holder-register thunk is
// called only when the mark entrypoint is non-null; otherwise it falls back
// to the generic GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    // TMP = obj + (index << 2), computed in the branch delay slot so the
    // thunk (and the fall-through load) can both use it.
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Generic (non-thunk) Baker path.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5243
// Generic Baker read barrier reference load: reads the holder's lock word
// first, emits a load-load barrier, performs the reference load (with or
// without `index`), then branches to a marking slow path when the lock word's
// read-barrier-state bit says the holder is gray. When `always_update_field`
// is set (used with `obj + field_offset`-shaped addresses), the slow path
// also writes the marked reference back to the field.
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      if (scale_factor == TIMES_1) {
        // Dlsa requires a shift amount of at least 1; plain add for TIMES_1.
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5349
5350void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
5351 Location out,
5352 Location ref,
5353 Location obj,
5354 uint32_t offset,
5355 Location index) {
5356 DCHECK(kEmitCompilerReadBarrier);
5357
5358 // Insert a slow path based read barrier *after* the reference load.
5359 //
5360 // If heap poisoning is enabled, the unpoisoning of the loaded
5361 // reference will be carried out by the runtime within the slow
5362 // path.
5363 //
5364 // Note that `ref` currently does not get unpoisoned (when heap
5365 // poisoning is enabled), which is alright as the `ref` argument is
5366 // not used by the artReadBarrierSlow entry point.
5367 //
5368 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5369 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena())
5370 ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
5371 AddSlowPath(slow_path);
5372
5373 __ Bc(slow_path->GetEntryLabel());
5374 __ Bind(slow_path->GetExitLabel());
5375}
5376
5377void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5378 Location out,
5379 Location ref,
5380 Location obj,
5381 uint32_t offset,
5382 Location index) {
5383 if (kEmitCompilerReadBarrier) {
5384 // Baker's read barriers shall be handled by the fast path
5385 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5386 DCHECK(!kUseBakerReadBarrier);
5387 // If heap poisoning is enabled, unpoisoning will be taken care of
5388 // by the runtime within the slow path.
5389 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5390 } else if (kPoisonHeapReferences) {
5391 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5392 }
5393}
5394
5395void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5396 Location out,
5397 Location root) {
5398 DCHECK(kEmitCompilerReadBarrier);
5399
5400 // Insert a slow path based read barrier *after* the GC root load.
5401 //
5402 // Note that GC roots are not affected by heap poisoning, so we do
5403 // not need to do anything special for this here.
5404 SlowPathCodeMIPS64* slow_path =
5405 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
5406 AddSlowPath(slow_path);
5407
5408 __ Bc(slow_path->GetEntryLabel());
5409 __ Bind(slow_path->GetExitLabel());
5410}
5411
Alexey Frunze4dda3372015-06-01 18:31:49 -07005412void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005413 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5414 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005415 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005416 switch (type_check_kind) {
5417 case TypeCheckKind::kExactCheck:
5418 case TypeCheckKind::kAbstractClassCheck:
5419 case TypeCheckKind::kClassHierarchyCheck:
5420 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08005421 call_kind =
5422 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07005423 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005424 break;
5425 case TypeCheckKind::kArrayCheck:
5426 case TypeCheckKind::kUnresolvedCheck:
5427 case TypeCheckKind::kInterfaceCheck:
5428 call_kind = LocationSummary::kCallOnSlowPath;
5429 break;
5430 }
5431
Alexey Frunze4dda3372015-06-01 18:31:49 -07005432 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005433 if (baker_read_barrier_slow_path) {
5434 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5435 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005436 locations->SetInAt(0, Location::RequiresRegister());
5437 locations->SetInAt(1, Location::RequiresRegister());
5438 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005439 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005440 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005441 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005442}
5443
// Emits the code for `obj instanceof cls`, writing 0/1 into `out`. The
// strategy depends on the statically determined TypeCheckKind: equality,
// super-class walk, hierarchy walk, array-of-non-primitive check, or a
// slow-path call for array/unresolved/interface checks. `out` doubles as the
// class-pointer cursor during the walks.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out == cls) ? 1 : 0, branchlessly, via xor + sltiu.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = (primitive type == kPrimNot) ? 1 : 0.
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5617
5618void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
5619 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
5620 locations->SetOut(Location::ConstantLocation(constant));
5621}
5622
5623void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
5624 // Will be generated at use site.
5625}
5626
5627void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
5628 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
5629 locations->SetOut(Location::ConstantLocation(constant));
5630}
5631
5632void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
5633 // Will be generated at use site.
5634}
5635
Calin Juravle175dc732015-08-25 15:42:32 +01005636void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5637 // The trampoline uses the same calling convention as dex calling conventions,
5638 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
5639 // the method_idx.
5640 HandleInvoke(invoke);
5641}
5642
5643void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5644 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
5645}
5646
Alexey Frunze4dda3372015-06-01 18:31:49 -07005647void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5648 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5649 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5650}
5651
5652void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
5653 HandleInvoke(invoke);
5654 // The register T0 is required to be used for the hidden argument in
5655 // art_quick_imt_conflict_trampoline, so add the hidden argument.
5656 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
5657}
5658
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Emits the interface-dispatch sequence: load the receiver's class, index
  // into its IMT, and call the resolved entry point via T9.
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument. Temp 1 is the extra temp registered as T0 in
  // LocationsBuilderMIPS64::VisitInvokeInterface.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  // Compressed references are loaded as unsigned 32-bit words.
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above is the instruction that may fault on a null receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetImt();
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9(); followed by a delay-slot NOP.
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  // Record the PC so the runtime can map this call site back to dex.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5700
5701void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005702 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5703 if (intrinsic.TryDispatch(invoke)) {
5704 return;
5705 }
5706
Alexey Frunze4dda3372015-06-01 18:31:49 -07005707 HandleInvoke(invoke);
5708}
5709
5710void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005711 // Explicit clinit checks triggered by static invokes must have been pruned by
5712 // art::PrepareForRegisterAllocation.
5713 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005714
Chris Larsen3039e382015-08-26 07:54:08 -07005715 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5716 if (intrinsic.TryDispatch(invoke)) {
5717 return;
5718 }
5719
Alexey Frunze4dda3372015-06-01 18:31:49 -07005720 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005721}
5722
void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Polymorphic invokes use the common invoke location summary.
  HandleInvoke(invoke);
}
5726
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate to the architecture-independent invoke-polymorphic call helper.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
5730
Chris Larsen3039e382015-08-26 07:54:08 -07005731static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005732 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005733 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5734 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005735 return true;
5736 }
5737 return false;
5738}
5739
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005740HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005741 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005742 bool fallback_load = false;
5743 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005744 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005745 case HLoadString::LoadKind::kBootImageInternTable:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005746 case HLoadString::LoadKind::kBssEntry:
5747 DCHECK(!Runtime::Current()->UseJitCompilation());
5748 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005749 case HLoadString::LoadKind::kJitTableAddress:
5750 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005751 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005752 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005753 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01005754 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005755 }
5756 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005757 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005758 }
5759 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005760}
5761
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005762HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
5763 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005764 bool fallback_load = false;
5765 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005766 case HLoadClass::LoadKind::kInvalid:
5767 LOG(FATAL) << "UNREACHABLE";
5768 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005769 case HLoadClass::LoadKind::kReferrersClass:
5770 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005771 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005772 case HLoadClass::LoadKind::kBootImageClassTable:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005773 case HLoadClass::LoadKind::kBssEntry:
5774 DCHECK(!Runtime::Current()->UseJitCompilation());
5775 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005776 case HLoadClass::LoadKind::kJitTableAddress:
5777 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005778 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005779 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005780 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005781 break;
5782 }
5783 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005784 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005785 }
5786 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005787}
5788
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types, so no downgrade is ever needed.
  return desired_dispatch_info;
}
5795
// Emits a static/direct call: first materializes the callee ArtMethod* (or the
// string-init entry point) per the method load kind, then transfers control per
// the code pointer location, and finally records the call's PC info.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Self-recursive call: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Two linked patches: auipc-style high half, then the low 16 bits in daddiu.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load the known method address from the literal pool.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry; linked high/low patches again.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Direct branch-and-link to this method's own frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
5867
5868void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005869 // Explicit clinit checks triggered by static invokes must have been pruned by
5870 // art::PrepareForRegisterAllocation.
5871 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005872
5873 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5874 return;
5875 }
5876
5877 LocationSummary* locations = invoke->GetLocations();
5878 codegen_->GenerateStaticOrDirectCall(invoke,
5879 locations->HasTemps()
5880 ? locations->GetTemp(0)
5881 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005882}
5883
// Emits a virtual call: loads the receiver's class, fetches the target method
// from the embedded vtable, and calls its quick entry point via T9.
void CodeGeneratorMIPS64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  // Compressed reference, hence the unsigned-word load; this instruction is
  // also the implicit null check on the receiver.
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9(); followed by a delay-slot NOP.
  __ Jalr(T9);
  __ Nop();
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
5919
5920void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
5921 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5922 return;
5923 }
5924
5925 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005926 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005927}
5928
// Allocates locations for HLoadClass according to its load kind and the active
// read-barrier configuration.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Runtime-call kind delegates entirely to the common helper; the class is
    // both passed and returned in the first runtime-call argument register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Input 0 is the current method, from which the declaring class is read.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
5965
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the class load per the load kind chosen at location-building time, and
// appends a slow path when resolution and/or class initialization may be needed.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes are loaded without a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Linked high/low PC-relative patches resolved at link time.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The class object's address is known now and fits in 32 bits.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBootImageClassTable: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      // Extract the reference from the slot data, i.e. clear the hash bits.
      int32_t masked_hash = ClassTable::TableSlot::MaskHash(
          ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
      if (masked_hash != 0) {
        __ Daddiu(out, out, -masked_hash);
      }
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // `bss_info_high` outlives the switch: the slow path needs it to
      // recompute the BSS entry address.
      bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      // With non-Baker read barriers there is no temp (see VisitLoadClass in
      // the locations builder), so reuse `out` as the address register.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      GpuRegister temp = non_baker_read_barrier
          ? out
          : locations->GetTemp(0).AsRegister<GpuRegister>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, temp);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      // A null entry means the class is not resolved yet; branch to slow path.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      // kRuntimeCall was handled at the top of the function.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6085
David Brazdilcb1c0552015-08-04 16:22:25 +01006086static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006087 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01006088}
6089
Alexey Frunze4dda3372015-06-01 18:31:49 -07006090void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6091 LocationSummary* locations =
6092 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
6093 locations->SetOut(Location::RequiresRegister());
6094}
6095
6096void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6097 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006098 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6099}
6100
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  // No inputs, outputs or runtime call; just record an empty summary.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
6104
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Zero out the pending-exception slot in TLS.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
6108
// Allocates locations for HLoadString according to its load kind and the
// active read-barrier configuration (mirrors VisitLoadClass above).
void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    // The runtime call returns the string in the first argument register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path.
        locations->AddTemp(Location::RequiresRegister());
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
6133
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006134// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6135// move.
6136void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006137 HLoadString::LoadKind load_kind = load->GetLoadKind();
6138 LocationSummary* locations = load->GetLocations();
6139 Location out_loc = locations->Out();
6140 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6141
6142 switch (load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006143 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
6144 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006145 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006146 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006147 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6148 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
6149 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006150 __ Daddiu(out, AT, /* placeholder */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006151 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006152 }
6153 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006154 uint32_t address = dchecked_integral_cast<uint32_t>(
6155 reinterpret_cast<uintptr_t>(load->GetString().Get()));
6156 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006157 __ LoadLiteral(out,
6158 kLoadUnsignedWord,
6159 codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006160 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006161 }
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006162 case HLoadString::LoadKind::kBootImageInternTable: {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006163 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006164 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006165 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006166 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6167 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006168 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6169 __ Lwu(out, AT, /* placeholder */ 0x5678);
6170 return;
6171 }
6172 case HLoadString::LoadKind::kBssEntry: {
6173 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6174 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6175 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
6176 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6177 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006178 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
6179 GpuRegister temp = non_baker_read_barrier
6180 ? out
6181 : locations->GetTemp(0).AsRegister<GpuRegister>();
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006182 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, temp);
Alexey Frunze15958152017-02-09 19:08:30 -08006183 GenerateGcRootFieldLoad(load,
6184 out_loc,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006185 temp,
Alexey Frunze15958152017-02-09 19:08:30 -08006186 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006187 kCompilerReadBarrierOption,
6188 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006189 SlowPathCodeMIPS64* slow_path =
6190 new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load, info_high);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006191 codegen_->AddSlowPath(slow_path);
6192 __ Beqzc(out, slow_path->GetEntryLabel());
6193 __ Bind(slow_path->GetExitLabel());
6194 return;
6195 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006196 case HLoadString::LoadKind::kJitTableAddress:
6197 __ LoadLiteral(out,
6198 kLoadUnsignedWord,
6199 codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
6200 load->GetStringIndex(),
6201 load->GetString()));
Alexey Frunze15958152017-02-09 19:08:30 -08006202 GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08006203 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006204 default:
6205 break;
6206 }
6207
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006208 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006209 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006210 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006211 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006212 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
6213 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6214 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006215}
6216
void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  // Constants have no inputs; expose the value as a constant location so
  // users can materialize it where needed.
  locations->SetOut(Location::ConstantLocation(constant));
}
6221
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6225
void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
  // Monitor enter/exit is a runtime call on the main path; the object
  // reference goes in the first runtime-call argument register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
6232
6233void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006234 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006235 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006236 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006237 if (instruction->IsEnter()) {
6238 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6239 } else {
6240 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6241 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006242}
6243
6244void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6245 LocationSummary* locations =
6246 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
6247 switch (mul->GetResultType()) {
6248 case Primitive::kPrimInt:
6249 case Primitive::kPrimLong:
6250 locations->SetInAt(0, Location::RequiresRegister());
6251 locations->SetInAt(1, Location::RequiresRegister());
6252 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6253 break;
6254
6255 case Primitive::kPrimFloat:
6256 case Primitive::kPrimDouble:
6257 locations->SetInAt(0, Location::RequiresFpuRegister());
6258 locations->SetInAt(1, Location::RequiresFpuRegister());
6259 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6260 break;
6261
6262 default:
6263 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6264 }
6265}
6266
6267void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
6268 Primitive::Type type = instruction->GetType();
6269 LocationSummary* locations = instruction->GetLocations();
6270
6271 switch (type) {
6272 case Primitive::kPrimInt:
6273 case Primitive::kPrimLong: {
6274 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6275 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6276 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
6277 if (type == Primitive::kPrimInt)
6278 __ MulR6(dst, lhs, rhs);
6279 else
6280 __ Dmul(dst, lhs, rhs);
6281 break;
6282 }
6283 case Primitive::kPrimFloat:
6284 case Primitive::kPrimDouble: {
6285 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6286 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6287 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
6288 if (type == Primitive::kPrimFloat)
6289 __ MulS(dst, lhs, rhs);
6290 else
6291 __ MulD(dst, lhs, rhs);
6292 break;
6293 }
6294 default:
6295 LOG(FATAL) << "Unexpected mul type " << type;
6296 }
6297}
6298
6299void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6300 LocationSummary* locations =
6301 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
6302 switch (neg->GetResultType()) {
6303 case Primitive::kPrimInt:
6304 case Primitive::kPrimLong:
6305 locations->SetInAt(0, Location::RequiresRegister());
6306 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6307 break;
6308
6309 case Primitive::kPrimFloat:
6310 case Primitive::kPrimDouble:
6311 locations->SetInAt(0, Location::RequiresFpuRegister());
6312 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6313 break;
6314
6315 default:
6316 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6317 }
6318}
6319
6320void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
6321 Primitive::Type type = instruction->GetType();
6322 LocationSummary* locations = instruction->GetLocations();
6323
6324 switch (type) {
6325 case Primitive::kPrimInt:
6326 case Primitive::kPrimLong: {
6327 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6328 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6329 if (type == Primitive::kPrimInt)
6330 __ Subu(dst, ZERO, src);
6331 else
6332 __ Dsubu(dst, ZERO, src);
6333 break;
6334 }
6335 case Primitive::kPrimFloat:
6336 case Primitive::kPrimDouble: {
6337 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6338 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
6339 if (type == Primitive::kPrimFloat)
6340 __ NegS(dst, src);
6341 else
6342 __ NegD(dst, src);
6343 break;
6344 }
6345 default:
6346 LOG(FATAL) << "Unexpected neg type " << type;
6347 }
6348}
6349
6350void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
6351 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006352 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006353 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006354 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006355 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6356 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006357}
6358
6359void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006360 // Note: if heap poisoning is enabled, the entry point takes care
6361 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006362 QuickEntrypointEnum entrypoint =
6363 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6364 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006365 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006366 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006367}
6368
void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation calls into the runtime on the main path.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    // Strings are allocated via StringFactory; reserve the method register as a temp.
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  // The new reference is returned in the runtime-call return register.
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
6380
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // Load the entrypoint ArtMethod* from the thread, then its code pointer,
    // and make an indirect call through T9 (MIPS PIC call register).
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
6399
void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
  // Bitwise NOT: single core-register input and output.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
6405
6406void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
6407 Primitive::Type type = instruction->GetType();
6408 LocationSummary* locations = instruction->GetLocations();
6409
6410 switch (type) {
6411 case Primitive::kPrimInt:
6412 case Primitive::kPrimLong: {
6413 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6414 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6415 __ Nor(dst, src, ZERO);
6416 break;
6417 }
6418
6419 default:
6420 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6421 }
6422}
6423
void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  // Boolean negation: single core-register input and output.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
6429
6430void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6431 LocationSummary* locations = instruction->GetLocations();
6432 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6433 locations->InAt(0).AsRegister<GpuRegister>(),
6434 1);
6435}
6436
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Null checks may branch to a throwing slow path; the checked reference
  // must be in a core register.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
6441
Calin Juravle2ae48182016-03-16 14:05:09 +00006442void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
6443 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006444 return;
6445 }
6446 Location obj = instruction->GetLocations()->InAt(0);
6447
6448 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00006449 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006450}
6451
Calin Juravle2ae48182016-03-16 14:05:09 +00006452void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006453 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006454 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006455
6456 Location obj = instruction->GetLocations()->InAt(0);
6457
6458 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
6459}
6460
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit (faulting load) or explicit (compare-and-branch)
  // variant implemented above.
  codegen_->GenerateNullCheck(instruction);
}
6464
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the generic binary-op location setup.
  HandleBinaryOp(instruction);
}
6468
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the generic binary-op code generation.
  HandleBinaryOp(instruction);
}
6472
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves never get a LocationSummary of their own.
  LOG(FATAL) << "Unreachable";
}
6476
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the move resolver, which emits the native move sequence.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6480
6481void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
6482 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
6483 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6484 if (location.IsStackSlot()) {
6485 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6486 } else if (location.IsDoubleStackSlot()) {
6487 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6488 }
6489 locations->SetOut(location);
6490}
6491
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
6496
void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  // The current ArtMethod* lives in the fixed method register (A0).
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
6502
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
6507
6508void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
6509 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01006510 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006511 locations->SetInAt(i, Location::Any());
6512 }
6513 locations->SetOut(Location::Any());
6514}
6515
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis never survive to code generation.
  LOG(FATAL) << "Unreachable";
}
6519
6520void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
6521 Primitive::Type type = rem->GetResultType();
6522 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006523 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6524 : LocationSummary::kNoCall;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006525 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
6526
6527 switch (type) {
6528 case Primitive::kPrimInt:
6529 case Primitive::kPrimLong:
6530 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006531 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006532 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6533 break;
6534
6535 case Primitive::kPrimFloat:
6536 case Primitive::kPrimDouble: {
6537 InvokeRuntimeCallingConvention calling_convention;
6538 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6539 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6540 locations->SetOut(calling_convention.GetReturnLocation(type));
6541 break;
6542 }
6543
6544 default:
6545 LOG(FATAL) << "Unexpected rem type " << type;
6546 }
6547}
6548
6549void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
6550 Primitive::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006551
6552 switch (type) {
6553 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07006554 case Primitive::kPrimLong:
6555 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006556 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006557
6558 case Primitive::kPrimFloat:
6559 case Primitive::kPrimDouble: {
Serban Constantinescufc734082016-07-19 17:18:07 +01006560 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
6561 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006562 if (type == Primitive::kPrimFloat) {
6563 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6564 } else {
6565 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6566 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006567 break;
6568 }
6569 default:
6570 LOG(FATAL) << "Unexpected rem type " << type;
6571 }
6572}
6573
void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // A fence has no operands and no output.
  constructor_fence->SetLocations(nullptr);
}
6577
void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is emitted as a store-store barrier.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
6582
void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A barrier has no operands and no output.
  memory_barrier->SetLocations(nullptr);
}
6586
void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the fence matching the requested barrier kind.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
6590
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  // The returned value must sit in the ABI-mandated return location.
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}
6596
void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return location; just tear down the frame.
  codegen_->GenerateFrameExit();
}
6600
void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  // No operands to place.
  ret->SetLocations(nullptr);
}
6604
void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and return.
  codegen_->GenerateFrameExit();
}
6608
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  // Rotate shares the generic shift location setup.
  HandleShift(ror);
}
6612
void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  // Rotate shares the generic shift code generation.
  HandleShift(ror);
}
6616
void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  // Left shift shares the generic shift location setup.
  HandleShift(shl);
}
6620
void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  // Left shift shares the generic shift code generation.
  HandleShift(shl);
}
6624
void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  // Arithmetic right shift shares the generic shift location setup.
  HandleShift(shr);
}
6628
void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  // Arithmetic right shift shares the generic shift code generation.
  HandleShift(shr);
}
6632
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares the generic binary-op location setup.
  HandleBinaryOp(instruction);
}
6636
void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares the generic binary-op code generation.
  HandleBinaryOp(instruction);
}
6640
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field reads share the generic field-get location setup.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6644
void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field reads share the generic field-get code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6648
void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field writes share the generic field-set location setup.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
6652
void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Forward the value-can-be-null hint to the shared field-store helper.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
6656
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Unresolved field accesses go through the runtime; build locations from
  // the shared field-access calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
6663
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Emit the runtime call that resolves and reads the instance field.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
6673
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Unresolved field accesses go through the runtime; build locations from
  // the shared field-access calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
6680
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Emit the runtime call that resolves and writes the instance field.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
6690
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Unresolved field accesses go through the runtime; build locations from
  // the shared field-access calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
6697
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Emit the runtime call that resolves and reads the static field.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
6707
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Unresolved field accesses go through the runtime; build locations from
  // the shared field-access calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
6714
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Emit the runtime call that resolves and writes the static field.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
6724
Alexey Frunze4dda3372015-06-01 18:31:49 -07006725void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01006726 LocationSummary* locations =
6727 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02006728 // In suspend check slow path, usually there are no caller-save registers at all.
6729 // If SIMD instructions are present, however, we force spilling all live SIMD
6730 // registers in full width (since the runtime only saves/restores lower part).
6731 locations->SetCustomSlowPathCallerSaves(
6732 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006733}
6734
6735void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
6736 HBasicBlock* block = instruction->GetBlock();
6737 if (block->GetLoopInformation() != nullptr) {
6738 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6739 // The back edge will generate the suspend check.
6740 return;
6741 }
6742 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6743 // The goto will generate the suspend check.
6744 return;
6745 }
6746 GenerateSuspendCheck(instruction, nullptr);
6747}
6748
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  // Throwing is a runtime call; the exception object goes in the first
  // runtime-call argument register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
6755
void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  // Exception delivery is handled entirely by the runtime.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6760
6761void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
6762 Primitive::Type input_type = conversion->GetInputType();
6763 Primitive::Type result_type = conversion->GetResultType();
6764 DCHECK_NE(input_type, result_type);
6765
6766 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
6767 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
6768 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6769 }
6770
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006771 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
6772
6773 if (Primitive::IsFloatingPointType(input_type)) {
6774 locations->SetInAt(0, Location::RequiresFpuRegister());
6775 } else {
6776 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006777 }
6778
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006779 if (Primitive::IsFloatingPointType(result_type)) {
6780 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006781 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006782 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006783 }
6784}
6785
6786void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
6787 LocationSummary* locations = conversion->GetLocations();
6788 Primitive::Type result_type = conversion->GetResultType();
6789 Primitive::Type input_type = conversion->GetInputType();
6790
6791 DCHECK_NE(input_type, result_type);
6792
6793 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
6794 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6795 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6796
6797 switch (result_type) {
6798 case Primitive::kPrimChar:
6799 __ Andi(dst, src, 0xFFFF);
6800 break;
6801 case Primitive::kPrimByte:
Vladimir Markob52bbde2016-02-12 12:06:05 +00006802 if (input_type == Primitive::kPrimLong) {
6803 // Type conversion from long to types narrower than int is a result of code
6804 // transformations. To avoid unpredictable results for SEB and SEH, we first
6805 // need to sign-extend the low 32-bit value into bits 32 through 63.
6806 __ Sll(dst, src, 0);
6807 __ Seb(dst, dst);
6808 } else {
6809 __ Seb(dst, src);
6810 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006811 break;
6812 case Primitive::kPrimShort:
Vladimir Markob52bbde2016-02-12 12:06:05 +00006813 if (input_type == Primitive::kPrimLong) {
6814 // Type conversion from long to types narrower than int is a result of code
6815 // transformations. To avoid unpredictable results for SEB and SEH, we first
6816 // need to sign-extend the low 32-bit value into bits 32 through 63.
6817 __ Sll(dst, src, 0);
6818 __ Seh(dst, dst);
6819 } else {
6820 __ Seh(dst, src);
6821 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006822 break;
6823 case Primitive::kPrimInt:
6824 case Primitive::kPrimLong:
Goran Jakovljevic992bdb92016-12-28 16:21:48 +01006825 // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
6826 // conversions, except when the input and output registers are the same and we are not
6827 // converting longs to shorter types. In these cases, do nothing.
6828 if ((input_type == Primitive::kPrimLong) || (dst != src)) {
6829 __ Sll(dst, src, 0);
6830 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006831 break;
6832
6833 default:
6834 LOG(FATAL) << "Unexpected type conversion from " << input_type
6835 << " to " << result_type;
6836 }
6837 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006838 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6839 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6840 if (input_type == Primitive::kPrimLong) {
6841 __ Dmtc1(src, FTMP);
6842 if (result_type == Primitive::kPrimFloat) {
6843 __ Cvtsl(dst, FTMP);
6844 } else {
6845 __ Cvtdl(dst, FTMP);
6846 }
6847 } else {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006848 __ Mtc1(src, FTMP);
6849 if (result_type == Primitive::kPrimFloat) {
6850 __ Cvtsw(dst, FTMP);
6851 } else {
6852 __ Cvtdw(dst, FTMP);
6853 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006854 }
6855 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
6856 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006857 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6858 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006859
6860 if (result_type == Primitive::kPrimLong) {
Roland Levillain888d0672015-11-23 18:53:50 +00006861 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006862 __ TruncLS(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006863 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006864 __ TruncLD(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006865 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006866 __ Dmfc1(dst, FTMP);
Roland Levillain888d0672015-11-23 18:53:50 +00006867 } else {
6868 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006869 __ TruncWS(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006870 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006871 __ TruncWD(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006872 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006873 __ Mfc1(dst, FTMP);
Roland Levillain888d0672015-11-23 18:53:50 +00006874 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006875 } else if (Primitive::IsFloatingPointType(result_type) &&
6876 Primitive::IsFloatingPointType(input_type)) {
6877 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6878 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
6879 if (result_type == Primitive::kPrimFloat) {
6880 __ Cvtsd(dst, src);
6881 } else {
6882 __ Cvtds(dst, src);
6883 }
6884 } else {
6885 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
6886 << " to " << result_type;
6887 }
6888}
6889
// Unsigned shift right: location setup is shared with the other shift operations.
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
6893
// Unsigned shift right: code generation is shared with the other shift operations.
void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
6897
// Bitwise XOR: location setup is shared with the other binary operations.
void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
6901
// Bitwise XOR: code generation is shared with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
6905
void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6910
void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6915
// == : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
6919
// == : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}
6923
// != : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
6927
// != : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}
6931
// Signed < : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
6935
// Signed < : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}
6939
// Signed <= : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
6943
// Signed <= : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}
6947
// Signed > : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
6951
// Signed > : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}
6955
// Signed >= : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
6959
// Signed >= : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
6963
// Unsigned < : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
6967
// Unsigned < : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}
6971
// Unsigned <= : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
6975
// Unsigned <= : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
6979
// Unsigned > : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
6983
// Unsigned > : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
6987
// Unsigned >= : delegates to the common condition handler.
void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
6991
// Unsigned >= : delegates to the common condition handler.
void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
6995
// Packed switch: only the switch value needs a register. The lowering strategy
// (cascaded compare/jumps vs. jump table) is chosen at code-generation time;
// see InstructionCodeGeneratorMIPS64::VisitPackedSwitch.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
7002
// Emits a packed switch as a cascade of compare-and-branch instructions.
// Used for small switches (<= kPackedSwitchJumpTableThreshold entries);
// larger ones go through GenTableBasedPackedSwitch instead.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  // Bias the switch value so temp_reg holds the zero-based case index
  // (value - lower_bound).
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle the remaining cases two at a time: decrementing the biased index by 2
  // lets each pair of cases be tested with a single "< 0" and a single "== 0".
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7038
// Emits a packed switch as an indirect jump through a jump table whose entries
// are 32-bit offsets relative to the start of the table. Used for switches with
// more than kPackedSwitchJumpTableThreshold entries.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table with one label per case successor.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range? TMP = value - lower_bound (zero-based index);
  // a single unsigned compare also catches values below lower_bound.
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // Scale the index by 4 (each table entry is a 32-bit offset) and add the base.
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();
}
7069
7070void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7071 int32_t lower_bound = switch_instr->GetStartValue();
7072 uint32_t num_entries = switch_instr->GetNumEntries();
7073 LocationSummary* locations = switch_instr->GetLocations();
7074 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7075 HBasicBlock* switch_block = switch_instr->GetBlock();
7076 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7077
7078 if (num_entries > kPackedSwitchJumpTableThreshold) {
7079 GenTableBasedPackedSwitch(value_reg,
7080 lower_bound,
7081 num_entries,
7082 switch_block,
7083 default_block);
7084 } else {
7085 GenPackedSwitchWithCompares(value_reg,
7086 lower_bound,
7087 num_entries,
7088 switch_block,
7089 default_block);
7090 }
7091}
7092
// ClassTableGet loads a method pointer out of a class's vtable or IMT
// (see the code generator below); it needs the input in a register and a
// register for the result, with no runtime call.
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
7099
Chris Larsenc9905a62017-03-13 17:06:18 -07007100void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7101 LocationSummary* locations = instruction->GetLocations();
7102 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
7103 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7104 instruction->GetIndex(), kMips64PointerSize).SizeValue();
7105 __ LoadFromOffset(kLoadDoubleword,
7106 locations->Out().AsRegister<GpuRegister>(),
7107 locations->InAt(0).AsRegister<GpuRegister>(),
7108 method_offset);
7109 } else {
7110 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
7111 instruction->GetIndex(), kMips64PointerSize));
7112 __ LoadFromOffset(kLoadDoubleword,
7113 locations->Out().AsRegister<GpuRegister>(),
7114 locations->InAt(0).AsRegister<GpuRegister>(),
7115 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
7116 __ LoadFromOffset(kLoadDoubleword,
7117 locations->Out().AsRegister<GpuRegister>(),
7118 locations->Out().AsRegister<GpuRegister>(),
7119 method_offset);
7120 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007121}
7122
Alexey Frunze4dda3372015-06-01 18:31:49 -07007123} // namespace mips64
7124} // namespace art