/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_mips64.h"

#include "arch/mips64/asm_support_mips64.h"
#include "art_method.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_mips64.h"
#include "linker/linker_patch.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/stack_checks.h"

namespace art {
namespace mips64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;

Location Mips64ReturnLocation(Primitive::Type return_type) {
  switch (return_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(V0);

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      return Location::FpuRegisterLocation(F0);

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
}

Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
  return Mips64ReturnLocation(type);
}

Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}

Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

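  // In the MIPS64 N64 calling convention, GP and FP argument registers are
  // allocated in lockstep: an argument passed in an FPU register also consumes
  // the corresponding GP register slot and vice versa, hence the paired index
  // increments below.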
  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return Mips64ReturnLocation(type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

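// Slow path throwing the exception for an out-of-range array or string index
// via the kQuickThrowArrayBounds / kQuickThrowStringBounds entrypoints.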
class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};

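// Slow path throwing ArithmeticException on division by zero via the
// kQuickThrowDivZero entrypoint.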
class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};

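// Slow path resolving (and, when requested, initializing) a class through the
// runtime. For HLoadClass/kBssEntry the resolved class is also stored to the
// .bss entry so that subsequent loads take the fast path.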
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit,
                          const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<GpuRegister>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out.AsRegister<GpuRegister>(), TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at`, the instruction where this slow path is inserted.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};

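// Slow path resolving a string through the kQuickResolveString entrypoint, used
// only for HLoadString/kBssEntry. The resolved string is also stored to the
// .bss entry so that subsequent loads take the fast path.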
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction,
                                    const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS64(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    GpuRegister out = locations->Out().AsRegister<GpuRegister>();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(),
                                                 string_index,
                                                 bss_info_high_);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    Primitive::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};

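// Slow path throwing NullPointerException via the kQuickThrowNullPointer
// entrypoint.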
class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};

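// Slow path calling the kQuickTestSuspend entrypoint at a safepoint, then
// resuming either right after the suspend check or at the given successor
// block.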
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      __ Bc(GetReturnLabel());
    } else {
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};

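// Slow path for HInstanceOf and HCheckCast, calling the
// kQuickInstanceofNonTrivial and kQuickCheckInstanceOf entrypoints
// respectively.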
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};

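// Slow path transferring control to the interpreter via the kQuickDeoptimize
// entrypoint.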
class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};

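// Slow path performing an object array store through the kQuickAputObject
// entrypoint, which takes care of the required type checks.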
class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
                                Location ref,
                                Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
      __ Jalr(entrypoint_.AsRegister<GpuRegister>());
      __ Nop();
    } else {
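      // The ReadBarrierMarkRegX entrypoints are ordered by register number,
      // starting at register 1; `ref_reg - 1` selects the entrypoint for
      // `ref_reg` (ZERO, register 0, can never hold a reference and has no
      // entrypoint of its own).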
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                          instruction_,
                                                          this);
    }
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates the field before us, but that is OK. This is
    // achieved using a strong compare-and-set (CAS) operation with
    // relaxed memory synchronization ordering, where the expected
    // value is the old reference and the desired value is the new
    // reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;  // Pointer to actual memory.
    GpuRegister tmp = AT;  // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 root_,
                                 Primitive::kPrimNot);
    mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
};

CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<Mips64Assembler*>(GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips64);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}

Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}

void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}

void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}

1150static dwarf::Reg DWARFReg(GpuRegister reg) {
1151 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
1152}
1153
David Srbeckyba702002016-02-01 18:15:29 +00001154static dwarf::Reg DWARFReg(FpuRegister reg) {
1155 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
1156}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001157
1158void CodeGeneratorMIPS64::GenerateFrameEntry() {
1159 __ Bind(&frame_entry_label_);
1160
1161 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();
1162
1163 if (do_overflow_check) {
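    // Implicit stack-overflow probe: load from the lowest address the method
    // may touch. If that address falls in the stack guard page, the access
    // faults and the runtime turns the fault into a StackOverflowError at the
    // PC recorded just below.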
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips64)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

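  // `ofs` walks down from the frame top, so registers later in the
  // kCoreCalleeSaves/kFpuCalleeSaves arrays land at higher addresses; each
  // store is mirrored in the CFI so unwinders can locate the saved values.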
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}

void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling, restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  __ Jic(RA, 0);

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack.
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (Primitive::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsSIMDStackSlot()) {
      __ LoadFpuFromOffset(kLoadQuadword,
                           destination.AsFpuRegister<FpuRegister>(),
                           SP,
                           source.GetStackIndex());
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant.
      GpuRegister gpr = AT;
      if (!Primitive::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == Primitive::kPrimFloat) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == Primitive::kPrimDouble) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR.
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          // Move to FPR from FPR.
          if (dst_type == Primitive::kPrimFloat) {
            __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          } else {
            DCHECK_EQ(dst_type, Primitive::kPrimDouble);
            __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          }
        }
      } else {
        DCHECK(destination.IsRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ StoreFpuToOffset(kStoreQuadword,
                          source.AsFpuRegister<FpuRegister>(),
                          SP,
                          destination.GetStackIndex());
    } else {
      DCHECK(source.IsSIMDStackSlot());
      __ LoadFpuFromOffset(kLoadQuadword,
                           FTMP,
                           SP,
                           source.GetStackIndex());
      __ StoreFpuToOffset(kStoreQuadword,
                          FTMP,
                          SP,
                          destination.GetStackIndex());
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR.
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant.
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack.
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}

void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs.
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs.
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}

void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ LoadConst32(location.AsRegister<GpuRegister>(), value);
}

void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
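  // The card table base kept in `card` doubles as the value used to mark a
  // card dirty: card address = base + (object >> kCardShift), and the store
  // below writes the base's low byte there (the runtime biases the base so
  // that byte equals the dirty-card marker).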
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

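// Emits one LinkerPatch per recorded PC-relative location. Each access comes in
// a high/low pair; for the low half, the PC-relative offset is computed against
// the label of the corresponding high half (the auipc), which is what
// `pc_rel_offset` captures below.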
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}

void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
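  // Boot image compilations emit direct image-relative patches for methods,
  // types and strings; app compilations instead resolve types and strings
  // through the class table / intern table, and methods (plus any unresolved
  // types/strings) through the .bss entries emitted after this branch.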
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        pc_relative_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        pc_relative_string_patches_, linker_patches);
  } else {
    DCHECK(pc_relative_method_patches_.empty());
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
        pc_relative_string_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMethodPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.index,
                            info_high,
                            &pc_relative_method_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.index,
                            info_high,
                            &method_bss_entry_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file,
    uint32_t offset_or_index,
    const PcRelativePatchInfo* info_high,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index, info_high);
  return &patches->back();
}

Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint64_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
}

void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}

Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                          dex::StringIndex string_index,
                                                          Handle<mirror::String> handle) {
  jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
                              reinterpret_cast64<uint64_t>(handle.GetReference()));
  return jit_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                         dex::TypeIndex type_index,
                                                         Handle<mirror::Class> handle) {
  jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
                             reinterpret_cast64<uint64_t>(handle.GetReference()));
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const Literal* literal,
                                          uint64_t index_in_table) const {
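  // The literal was emitted as a 32-bit placeholder; overwrite it with the
  // address of the root's slot in the JIT roots table. The
  // dchecked_integral_cast verifies that the address fits in 32 bits.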
  uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    Literal* table_entry_literal = entry.second;
    const auto it = jit_string_roots_.find(string_reference);
    DCHECK(it != jit_string_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    Literal* table_entry_literal = entry.second;
    const auto it = jit_class_roots_.find(type_reference);
    DCHECK(it != jit_class_roots_.end());
    uint64_t index_in_table = it->second;
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}

void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls.
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}

size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
                      FpuRegister(reg_id),
                      SP,
                      stack_index);
  return GetFloatingPointSpillSlotSize();
}

size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
                       FpuRegister(reg_id),
                       SP,
                       stack_index);
  return GetFloatingPointSpillSlotSize();
}

void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}

void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}

void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
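  // The entry point is loaded from the Thread register (TR) and called through
  // T9, following the MIPS convention of passing the callee address in T9 (a
  // position-independent callee can derive its GP from it). The Nop fills the
  // Jalr delay slot.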
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}

void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
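  // Load the class status and take the slow path for any value below
  // kStatusInitialized; the slow path calls into the runtime to initialize the
  // class before retrying.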
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}

void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

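  // A nonzero thread-flags halfword means the runtime requested a suspension
  // (or a checkpoint), so control falls into the slow path in that case.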
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type type = instruction->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
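        // The logical ops (Andi/Ori/Xori) take a 16-bit zero-extended
        // immediate, hence IsUint<16>; Addiu/Daddiu sign-extend theirs, hence
        // IsInt<16>. A subtract is emitted as an add of the negated immediate.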
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          can_use_imm = IsUint<16>(imm);
        } else if (instruction->IsAdd()) {
          can_use_imm = IsInt<16>(imm);
        } else {
          DCHECK(instruction->IsSub());
          can_use_imm = IsInt<16>(-imm);
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == Primitive::kPrimFloat)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        uint32_t shift_value = rhs_imm &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
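        // Java defines shift distances modulo the operand width, so mask the
        // constant to 0..31 for int and 0..63 for long before selecting an
        // instruction.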

        if (shift_value == 0) {
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
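          // Doubleword shift immediates encode only 5 bits, so distances of
          // 32..63 use the *32 instruction variants with (shift_value - 32).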
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier
                                                       ? LocationSummary::kCallOnSlowPath
                                                       : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}

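// Returns a callable that the assembler's load/store helpers invoke once the
// memory access has been emitted; recording the PC there lets a faulting
// load/store double as the null check for the object it dereferences.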
static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
  auto null_checker = [codegen, instruction]() {
    codegen->MaybeRecordImplicitNullCheck(instruction);
  };
  return null_checker;
}

void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  Primitive::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case Primitive::kPrimBoolean: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimByte: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimChar: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
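      // TMP now holds bit 0 of the count word: 0 means the string is
      // compressed (8-bit chars), 1 means uncompressed (16-bit chars).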
Alexey Frunze4dda3372015-06-01 18:31:49 -07002220 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002221 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2222 if (maybe_compressed_char_at) {
2223 Mips64Label uncompressed_load, done;
2224 __ Bnezc(TMP, &uncompressed_load);
2225 __ LoadFromOffset(kLoadUnsignedByte,
2226 out,
2227 obj,
2228 data_offset + (const_index << TIMES_1));
2229 __ Bc(&done);
2230 __ Bind(&uncompressed_load);
2231 __ LoadFromOffset(kLoadUnsignedHalfword,
2232 out,
2233 obj,
2234 data_offset + (const_index << TIMES_2));
2235 __ Bind(&done);
2236 } else {
2237 __ LoadFromOffset(kLoadUnsignedHalfword,
2238 out,
2239 obj,
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002240 data_offset + (const_index << TIMES_2),
2241 null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002242 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002243 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002244 GpuRegister index_reg = index.AsRegister<GpuRegister>();
2245 if (maybe_compressed_char_at) {
2246 Mips64Label uncompressed_load, done;
2247 __ Bnezc(TMP, &uncompressed_load);
2248 __ Daddu(TMP, obj, index_reg);
2249 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2250 __ Bc(&done);
2251 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002252 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002253 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2254 __ Bind(&done);
2255 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002256 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002257 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002258 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002259 }
2260 break;
2261 }
2262
Alexey Frunze15958152017-02-09 19:08:30 -08002263 case Primitive::kPrimInt: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002264 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002265 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002266 LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
2267 if (index.IsConstant()) {
2268 size_t offset =
2269 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002270 __ LoadFromOffset(load_type, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002271 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002272 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002273 __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002274 }
2275 break;
2276 }
2277
    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        bool temp_needed = index.IsConstant()
            ? !kBakerReadBarrierThunksEnableForFields
            : !kBakerReadBarrierThunksEnableForArrays;
        Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          offset,
                                                          temp,
                                                          /* needs_null_check */ false);
        } else {
          codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          index,
                                                          temp,
                                                          /* needs_null_check */ false);
        }
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
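
// With string compression enabled, String.count holds (length << 1) | flag, where the
// low bit is 0 for compressed (8-bit) and 1 for uncompressed (16-bit) contents; the
// logical length is therefore recovered above with a single unsigned shift right by one.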

Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
  return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
      ? Location::ConstantLocation(instruction->AsConstant())
      : Location::RequiresRegister();
}

Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
  // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
  // We can store a non-zero float or double constant without first loading it into the FPU,
  // but we should only prefer this if the constant has a single use.
  if (instruction->IsConstant() &&
      (instruction->AsConstant()->IsZeroBitPattern() ||
       instruction->GetUses().HasExactlyOneElement())) {
    return Location::ConstantLocation(instruction->AsConstant());
  }
  // Otherwise require an FPU register for the constant.
  return Location::RequiresFpuRegister();
}
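
// What the two helpers above buy us, as a sketch: storing the Java constant 0.0 into a
// double[] element needs no FPU constant load at all; the value location stays a
// constant and the store is emitted from the integer ZERO register via StoreConstToOffset
// in VisitArraySet below. A non-zero single-use FP constant is likewise materialized
// into an integer temp rather than an FPU register.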

void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
    locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
  }
  if (needs_write_barrier) {
    // Temporary register for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
  }
}

void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

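    // The reference case below must uphold two invariants the primitive cases do not:
    // the stored value has to be assignable to the array's component type (otherwise
    // the slow path throws ArrayStoreException), and the GC card for `obj` has to be
    // marked so the garbage collector notices the new cross-object reference.
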
    case Primitive::kPrimNot: {
      if (value_location.IsConstant()) {
        // Just setting null.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers.  This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }
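
    // Recap of the inline type check above, as pseudocode:
    //   ok = (value->klass_ == obj->klass_->component_type_) ||
    //        (obj->klass_->component_type_->super_class_ == null)  // i.e. Object[]
    //   if (!ok) goto ArraySetSlowPath;  // the runtime check there may still succeed
    // The super_class_ test is only emitted when the static type is Object[].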

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);

  GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();

  // length is limited by the maximum positive signed 32-bit integer.
  // Unsigned comparison of length and index checks for index < 0
  // and for length <= index simultaneously.
  __ Bgeuc(index, length, slow_path->GetEntryLabel());
}
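
// Worked example of the single-compare bounds check above: for length == 5, an index of
// -1 reinterprets as 0xffffffff unsigned, so `index >=u length` holds and the slow path
// (which throws ArrayIndexOutOfBoundsException) is taken, exactly as for index == 5.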

// Temp is used for read barrier.
static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
  if (kEmitCompilerReadBarrier &&
      !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
      (kUseBakerReadBarrier ||
       type_check_kind == TypeCheckKind::kAbstractClassCheck ||
       type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
       type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
    return 1;
  }
  return 0;
}

// Extra temp is used for read barrier.
static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
  return 1 + NumberOfInstanceOfTemps(type_check_kind);
}

void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();

  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall;  // In fact, call on a fatal (non-returning) slow path.
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}

void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

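  // Dispatch on the statically known shape of the check: an exact check is a single
  // class compare, abstract/hierarchy checks walk the super_class_ chain, the array
  // check walks into component_type_, and the interface check scans the iftable;
  // anything the fast path cannot decide falls through to the slow path above.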
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require us to
      // assign fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve the performance of the fast path. We cannot get
      // false positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
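
// The iftable scanned above stores two references per implemented interface (the
// interface class followed by its method array), and its length is counted in array
// elements; hence the loop advances the cursor by 2 * kHeapReferenceSize and decrements
// the remaining count by 2.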

void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
      check->GetLoadClass(),
      check,
      check->GetDexPc(),
      /* do_clinit */ true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
}

void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);

  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;
      if (use_imm) {
        if (in_type == Primitive::kPrimLong) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }
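
    // The Slt/Slt/Subu sequence above computes the signum directly:
    //   res = (lhs > rhs) - (lhs < rhs)
    // e.g. lhs = 3, rhs = 7: Slt(TMP, 3, 7) = 1, Slt(res, 7, 3) = 0, so res = 0 - 1 = -1.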

    case Primitive::kPrimFloat: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
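
// The gt/lt bias above encodes the dex cmpg/cmpl semantics for NaN: when either operand
// is NaN, both the CmpEq and CmpLt compares produce false, so a gt-biased compare falls
// through to the result 1 and an lt-biased one to -1.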

void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  switch (instruction->InputAt(0)->GetType()) {
    default:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      break;
  }
  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  Primitive::Type type = instruction->InputAt(0)->GetType();
  LocationSummary* locations = instruction->GetLocations();
  switch (type) {
    default:
      // Integer case.
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
      return;
    case Primitive::kPrimLong:
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
      return;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
      return;
  }
}

void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Move(out, ZERO);
  } else {
    if (imm == -1) {
      if (type == Primitive::kPrimInt) {
        __ Subu(out, ZERO, dividend);
      } else {
        DCHECK_EQ(type, Primitive::kPrimLong);
        __ Dsubu(out, ZERO, dividend);
      }
    } else if (out != dividend) {
      __ Move(out, dividend);
    }
  }
}

void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
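
// The Sra/Srl pair at the head of each power-of-two path above builds the rounding
// bias: arithmetic-shifting the dividend right by 31 (or 63) gives 0 for non-negative
// values and all ones for negative ones, and the logical shift keeps just the low
// ctz_imm bits. Adding that bias before the final arithmetic shift makes the division
// round toward zero, as Java requires; e.g. -7 / 4: bias = 3, (-7 + 3) >> 2 = -1.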

void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == Primitive::kPrimLong),
                                  &magic,
                                  &shift);

  if (type == Primitive::kPrimInt) {
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
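
// GenerateDivRemWithAnyConstant is the standard multiply-by-magic-constant division:
// CalculateMagicAndShiftForDivRem picks `magic` and `shift` such that the quotient is
// the high word of dividend * magic (MuhR6/Dmuh), corrected and then arithmetically
// shifted right by `shift`. The Addu/Subu (Daddu/Dsubu) fixups handle the cases where
// magic and imm have opposite signs, and the final sign extraction plus subtraction
// adds one for negative intermediate results so the quotient truncates toward zero.
// Rem is then computed as dividend - quotient * imm.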

void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  LocationSummary* locations = instruction->GetLocations();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
    GpuRegister divisor = second.AsRegister<GpuRegister>();
    if (instruction->IsDiv()) {
      if (type == Primitive::kPrimInt)
        __ DivR6(out, dividend, divisor);
      else
        __ Ddiv(out, dividend, divisor);
    } else {
      if (type == Primitive::kPrimInt)
        __ ModR6(out, dividend, divisor);
      else
        __ Dmod(out, dividend, divisor);
    }
  }
}
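
// Summary of the constant-divisor dispatch above: 0 generates nothing (HDivZeroCheck
// already diverted execution), +/-1 is a move or negation, powers of two reduce to
// shifts, and any other constant uses the magic multiply; only non-constant divisors
// reach the hardware DivR6/Ddiv/ModR6/Dmod instructions.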

void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (type == Primitive::kPrimFloat)
        __ DivS(dst, lhs, rhs);
      else
        __ DivD(dst, lhs, rhs);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
    if (divisor == 0) {
      __ Bc(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

Alexey Frunze299a9392015-12-08 16:08:02 -08003514void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
3515 bool is64bit,
3516 LocationSummary* locations) {
3517 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3518 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3519 Location rhs_location = locations->InAt(1);
3520 GpuRegister rhs_reg = ZERO;
3521 int64_t rhs_imm = 0;
3522 bool use_imm = rhs_location.IsConstant();
3523 if (use_imm) {
3524 if (is64bit) {
3525 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3526 } else {
3527 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3528 }
3529 } else {
3530 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3531 }
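  // The sum is formed in unsigned arithmetic so that rhs_imm == INT64_MAX cannot
  // cause signed overflow; it is used below to turn "<=" and ">" into "<" comparisons.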
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}

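// Materializes `cond` over the int/long inputs into `dst`. Returns true when the
// value left in `dst` is the *opposite* of `cond` (callers must then swap how they
// consume `dst`); this lets every condition be emitted without a final inversion.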
bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
                                                               bool is64bit,
                                                               LocationSummary* input_locations,
                                                               GpuRegister dst) {
  GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = input_locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (is64bit) {
          __ Daddiu(dst, lhs, -rhs_imm);
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
        }
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}

void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}

void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       Primitive::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
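        // CmpEqS wrote all ones (i.e. -1) to FTMP on equality and all zeros
        // otherwise, so adding 1 yields the 0/1 "not equal" result directly,
        // with no Andi needed.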
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}

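// Materializes `cond` over the float/double inputs into FPU register `dst` using
// the MIPS R6 cmp.cond.fmt instructions. Returns true when `dst` holds the
// negation of `cond`; only kCondNE needs that, as there is no "not equal" compare.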
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          Primitive::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}

void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                Primitive::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}

void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}

void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeMIPS64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  if (!cond->IsConstant()) {
    if (!Primitive::IsFloatingPointType(cond_type)) {
      if (!Primitive::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!Primitive::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}

void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case Primitive::kPrimLong:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
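        // AT is clobbered as scratch for the true value before the condition is
        // read again below, so the materialized condition must not live in AT.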
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}

void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ LoadFromOffset(kLoadWord,
                    flag->GetLocations()->Out().AsRegister<GpuRegister>(),
                    SP,
                    codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
}

void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  CanMoveConditionally(select, locations);
}

void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
  if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
    GenConditionalMove(select);
  } else {
    LocationSummary* locations = select->GetLocations();
    Mips64Label false_target;
    GenerateTestAndBranch(select,
                          /* condition_input_index */ 2,
                          /* true_target */ nullptr,
                          &false_target);
    codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
    __ Bind(&false_target);
  }
}

void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      object_field_get_with_read_barrier
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps in the case of an object field get with
    // read barriers enabled: we do not want the move to overwrite the
    // object's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_field_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
    if (!kBakerReadBarrierThunksEnableForFields) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}

void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimNot:
      load_type = kLoadUnsignedWord;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}

void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info ATTRIBUTE_UNUSED) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
  } else {
    locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
  }
}

void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!Primitive::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}

void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

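// Loads the reference at `out + offset` back into `out` (the register doubles as
// the holder), emitting whatever barrier `read_barrier_option` dictates.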
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}

static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
  static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
  if (reg >= V0 && reg <= T2) {  // 13 consecutive regs.
    return reg - V0;
  } else if (reg >= S2 && reg <= S7) {  // 6 consecutive regs.
    return 13 + (reg - S2);
  } else if (reg == S8) {  // One more.
    return 19;
  }
  LOG(FATAL) << "Unexpected register " << reg;
  UNREACHABLE();
}

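// Per register, the field/array mark thunks come in a long-offset and a
// short-offset flavor; the short-offset ones are laid out after the long-offset
// group, hence the extra BAKER_MARK_INTROSPECTION_REGISTER_COUNT stride below.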
static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
  int num = GetBakerMarkThunkNumber(reg) +
            (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
  return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
}

static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
  return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
         BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
}

void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
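    // A non-null `label_low` means the caller is emitting a PC-relative address
    // pair and passes the 0x5678 placeholder as the low half (verified by the
    // DCHECK below); the actual low bits are filled in when the label is bound.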
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        //     temp = Thread::Current()->pReadBarrierMarkReg00
        //     // AKA &art_quick_read_barrier_mark_introspection.
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     if (temp != nullptr) {
        //        temp = &gc_root_thunk<root_reg>
        //        root = temp(root)
        //     }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
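        // E.g. offset 0x12348000 splits into offset_high 0x1235 and offset_low
        // -0x8000: Daui adds 0x12350000 to `obj` and the sign-extended -0x8000
        // in the load brings the effective address back to obj + 0x12348000.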
4994 bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
4995 GpuRegister base = short_offset ? obj : TMP;
4996 // Loading the entrypoint does not require a load acquire since it is only changed when
4997 // threads are suspended or running a checkpoint.
4998 __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
4999 if (!short_offset) {
5000 DCHECK(!label_low);
5001 __ Daui(base, obj, offset_high);
5002 }
Alexey Frunze0cab6562017-07-25 15:19:36 -07005003 Mips64Label skip_call;
5004 __ Beqz(T9, &skip_call, /* is_bare */ true);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005005 if (label_low != nullptr) {
5006 DCHECK(short_offset);
5007 __ Bind(label_low);
5008 }
5009 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
5010 __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low); // Single instruction
5011 // in delay slot.
5012 __ Jialc(T9, thunk_disp);
Alexey Frunze0cab6562017-07-25 15:19:36 -07005013 __ Bind(&skip_call);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005014 } else {
5015 // Note that we do not actually check the value of `GetIsGcMarking()`
5016 // to decide whether to mark the loaded GC root or not. Instead, we
5017 // load into `temp` (T9) the read barrier mark entry point corresponding
5018 // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
5019 // is false, and vice versa.
5020 //
5021 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
5022 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
5023 // if (temp != null) {
5024 // root = temp(root)
5025 // }
Alexey Frunze15958152017-02-09 19:08:30 -08005026
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005027 if (label_low != nullptr) {
5028 __ Bind(label_low);
5029 }
5030 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
5031 __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
5032 static_assert(
5033 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5034 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5035 "have different sizes.");
5036 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5037 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5038 "have different sizes.");
Alexey Frunze15958152017-02-09 19:08:30 -08005039
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005040 // Slow path marking the GC root `root`.
5041 Location temp = Location::RegisterLocation(T9);
5042 SlowPathCodeMIPS64* slow_path =
5043 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(
5044 instruction,
5045 root,
5046 /*entrypoint*/ temp);
5047 codegen_->AddSlowPath(slow_path);
5048
5049 const int32_t entry_point_offset =
5050 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
5051 // Loading the entrypoint does not require a load acquire since it is only changed when
5052 // threads are suspended or running a checkpoint.
5053 __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
5054 __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
5055 __ Bind(slow_path->GetExitLabel());
5056 }
Alexey Frunze15958152017-02-09 19:08:30 -08005057 } else {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005058 if (label_low != nullptr) {
5059 __ Bind(label_low);
5060 }
Alexey Frunze15958152017-02-09 19:08:30 -08005061 // GC root loaded through a slow path for read barriers other
5062 // than Baker's.
5063 // /* GcRoot<mirror::Object>* */ root = obj + offset
5064 __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
5065 // /* mirror::Object* */ root = root->Read()
5066 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5067 }
Alexey Frunzef63f5692016-12-13 17:43:11 -08005068 } else {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005069 if (label_low != nullptr) {
5070 __ Bind(label_low);
5071 }
Alexey Frunzef63f5692016-12-13 17:43:11 -08005072 // Plain GC root load with no read barrier.
5073 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
5074 __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
5075 // Note that GC roots are not affected by heap poisoning, thus we
5076 // do not have to unpoison `root_reg` here.
5077 }
5078}
5079
Alexey Frunze15958152017-02-09 19:08:30 -08005080void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5081 Location ref,
5082 GpuRegister obj,
5083 uint32_t offset,
5084 Location temp,
5085 bool needs_null_check) {
5086 DCHECK(kEmitCompilerReadBarrier);
5087 DCHECK(kUseBakerReadBarrier);
5088
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005089 if (kBakerReadBarrierThunksEnableForFields) {
5090 // Note that we do not actually check the value of `GetIsGcMarking()`
5091 // to decide whether to mark the loaded reference or not. Instead, we
5092 // load into `temp` (T9) the read barrier mark introspection entrypoint.
5093 // If `temp` is null, it means that `GetIsGcMarking()` is false, and
5094 // vice versa.
5095 //
5096    // We use thunks for the slow path. That thunk checks the holder
5097 // and jumps to the entrypoint if needed. If the holder is not gray,
5098 // it issues a load-load memory barrier and returns to the original
5099 // reference load.
5100 //
5101 // temp = Thread::Current()->pReadBarrierMarkReg00
5102 // // AKA &art_quick_read_barrier_mark_introspection.
5103 // if (temp != nullptr) {
5104 // temp = &field_array_thunk<holder_reg>
5105 // temp()
5106 // }
5107 // not_gray_return_address:
5108 // // If the offset is too large to fit into the lw instruction, we
5109 // // use an adjusted base register (TMP) here. This register
5110 // // receives bits 16 ... 31 of the offset before the thunk invocation
5111    // // and the thunk uses it as the base register for the reference load.
5112 // HeapReference<mirror::Object> reference = *(obj+offset); // Original reference load.
5113 // gray_return_address:
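    // As the two labels above suggest (this is inferred from the code here, not
    // a full specification of the thunk): when the holder is not gray, the thunk
    // returns to not_gray_return_address, so the reference load below executes
    // normally; when it is gray, the load and the marking happen in the
    // thunk/entrypoint slow path, which then returns to gray_return_address,
    // past the original load.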
5114
5115 DCHECK(temp.IsInvalid());
5116 bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
5117 const int32_t entry_point_offset =
5118 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
5119    // There may or may not have been a null check if the field offset is smaller than
5120    // the page size.
5121    // If this is actually a load from an array, a null check must have been done earlier.
5122    // We nevertheless perform an explicit null check in the thunk, as doing so is
5123    // simpler than omitting it.
5124 if (instruction->IsArrayGet()) {
5125 DCHECK(!needs_null_check);
5126 }
5127 const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
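    // The displacement is added to T9 by the Jialc below, so the thunks are
    // presumably laid out at fixed displacements from
    // art_quick_read_barrier_mark_introspection; the (holder register, offset
    // width) pair selects which per-register thunk gets called.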
5128 // Loading the entrypoint does not require a load acquire since it is only changed when
5129 // threads are suspended or running a checkpoint.
5130 __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
5131 GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
Alexey Frunze0cab6562017-07-25 15:19:36 -07005132 Mips64Label skip_call;
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005133 if (short_offset) {
Alexey Frunze0cab6562017-07-25 15:19:36 -07005134 __ Beqzc(T9, &skip_call, /* is_bare */ true);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005135 __ Nop(); // In forbidden slot.
5136 __ Jialc(T9, thunk_disp);
Alexey Frunze0cab6562017-07-25 15:19:36 -07005137 __ Bind(&skip_call);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005138 // /* HeapReference<Object> */ ref = *(obj + offset)
5139 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset); // Single instruction.
5140 } else {
5141 int16_t offset_low = Low16Bits(offset);
5142 int16_t offset_high = High16Bits(offset - offset_low); // Accounts for sign extension in lwu.
Alexey Frunze0cab6562017-07-25 15:19:36 -07005143 __ Beqz(T9, &skip_call, /* is_bare */ true);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005144 __ Daui(TMP, obj, offset_high); // In delay slot.
5145 __ Jialc(T9, thunk_disp);
Alexey Frunze0cab6562017-07-25 15:19:36 -07005146 __ Bind(&skip_call);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005147 // /* HeapReference<Object> */ ref = *(obj + offset)
5148 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low); // Single instruction.
5149 }
5150 if (needs_null_check) {
5151 MaybeRecordImplicitNullCheck(instruction);
5152 }
5153 __ MaybeUnpoisonHeapReference(ref_reg);
5154 return;
5155 }
5156
Alexey Frunze15958152017-02-09 19:08:30 -08005157 // /* HeapReference<Object> */ ref = *(obj + offset)
5158 Location no_index = Location::NoLocation();
5159 ScaleFactor no_scale_factor = TIMES_1;
5160 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5161 ref,
5162 obj,
5163 offset,
5164 no_index,
5165 no_scale_factor,
5166 temp,
5167 needs_null_check);
5168}
5169
5170void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5171 Location ref,
5172 GpuRegister obj,
5173 uint32_t data_offset,
5174 Location index,
5175 Location temp,
5176 bool needs_null_check) {
5177 DCHECK(kEmitCompilerReadBarrier);
5178 DCHECK(kUseBakerReadBarrier);
5179
5180 static_assert(
5181 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5182 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005183 ScaleFactor scale_factor = TIMES_4;
5184
5185 if (kBakerReadBarrierThunksEnableForArrays) {
5186 // Note that we do not actually check the value of `GetIsGcMarking()`
5187 // to decide whether to mark the loaded reference or not. Instead, we
5188 // load into `temp` (T9) the read barrier mark introspection entrypoint.
5189 // If `temp` is null, it means that `GetIsGcMarking()` is false, and
5190 // vice versa.
5191 //
5192    // We use thunks for the slow path. That thunk checks the holder
5193 // and jumps to the entrypoint if needed. If the holder is not gray,
5194 // it issues a load-load memory barrier and returns to the original
5195 // reference load.
5196 //
5197 // temp = Thread::Current()->pReadBarrierMarkReg00
5198 // // AKA &art_quick_read_barrier_mark_introspection.
5199 // if (temp != nullptr) {
5200 // temp = &field_array_thunk<holder_reg>
5201 // temp()
5202 // }
5203 // not_gray_return_address:
5204 // // The element address is pre-calculated in the TMP register before the
5205 // // thunk invocation and the thunk benefits from it.
5206 // HeapReference<mirror::Object> reference = data[index]; // Original reference load.
5207 // gray_return_address:
5208
5209 DCHECK(temp.IsInvalid());
5210 DCHECK(index.IsValid());
5211 const int32_t entry_point_offset =
5212 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
5213    // We do not perform an explicit null check in the thunk, as some form of
5214    // null check must have been done earlier.
5215 DCHECK(!needs_null_check);
5216 const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
5217 // Loading the entrypoint does not require a load acquire since it is only changed when
5218 // threads are suspended or running a checkpoint.
5219 __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
Alexey Frunze0cab6562017-07-25 15:19:36 -07005220 Mips64Label skip_call;
5221 __ Beqz(T9, &skip_call, /* is_bare */ true);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005222 GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
5223 GpuRegister index_reg = index.AsRegister<GpuRegister>();
5224 __ Dlsa(TMP, index_reg, obj, scale_factor); // In delay slot.
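    // Dlsa computes TMP = obj + (index_reg << scale_factor), i.e. the element
    // address minus data_offset; the load below adds data_offset back in.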
5225 __ Jialc(T9, thunk_disp);
Alexey Frunze0cab6562017-07-25 15:19:36 -07005226 __ Bind(&skip_call);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005227 // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
5228 DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
5229 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset); // Single instruction.
5230 __ MaybeUnpoisonHeapReference(ref_reg);
5231 return;
5232 }
5233
Alexey Frunze15958152017-02-09 19:08:30 -08005234 // /* HeapReference<Object> */ ref =
5235 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Alexey Frunze15958152017-02-09 19:08:30 -08005236 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5237 ref,
5238 obj,
5239 data_offset,
5240 index,
5241 scale_factor,
5242 temp,
5243 needs_null_check);
5244}
5245
5246void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5247 Location ref,
5248 GpuRegister obj,
5249 uint32_t offset,
5250 Location index,
5251 ScaleFactor scale_factor,
5252 Location temp,
5253 bool needs_null_check,
5254 bool always_update_field) {
5255 DCHECK(kEmitCompilerReadBarrier);
5256 DCHECK(kUseBakerReadBarrier);
5257
5258 // In slow path based read barriers, the read barrier call is
5259 // inserted after the original load. However, in fast path based
5260 // Baker's read barriers, we need to perform the load of
5261 // mirror::Object::monitor_ *before* the original reference load.
5262 // This load-load ordering is required by the read barrier.
5263 // The fast path/slow path (for Baker's algorithm) should look like:
5264 //
5265 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
5266 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5267 // HeapReference<Object> ref = *src; // Original reference load.
5268 // bool is_gray = (rb_state == ReadBarrier::GrayState());
5269 // if (is_gray) {
5270 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5271 // }
5272 //
5273 // Note: the original implementation in ReadBarrier::Barrier is
5274 // slightly more complex as it performs additional checks that we do
5275 // not do here for performance reasons.
5276
5277 GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
5278 GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
5279 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5280
5281 // /* int32_t */ monitor = obj->monitor_
5282 __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
5283 if (needs_null_check) {
5284 MaybeRecordImplicitNullCheck(instruction);
5285 }
5286 // /* LockWord */ lock_word = LockWord(monitor)
5287 static_assert(sizeof(LockWord) == sizeof(int32_t),
5288 "art::LockWord and int32_t have different sizes.");
5289
5290 __ Sync(0); // Barrier to prevent load-load reordering.
5291
5292 // The actual reference load.
5293 if (index.IsValid()) {
5294 // Load types involving an "index": ArrayGet,
5295 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
5296 // intrinsics.
5297 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
5298 if (index.IsConstant()) {
5299 size_t computed_offset =
5300 (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
5301 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
5302 } else {
5303 GpuRegister index_reg = index.AsRegister<GpuRegister>();
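      // Dlsa cannot encode a shift amount of zero (the R6 LSA/DLSA shift field
      // covers shifts 1 through 4), so TIMES_1 falls back to a plain Daddu;
      // otherwise TMP = obj + (index_reg << scale_factor).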
Chris Larsencd0295d2017-03-31 15:26:54 -07005304 if (scale_factor == TIMES_1) {
5305 __ Daddu(TMP, index_reg, obj);
5306 } else {
5307 __ Dlsa(TMP, index_reg, obj, scale_factor);
5308 }
Alexey Frunze15958152017-02-09 19:08:30 -08005309 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
5310 }
5311 } else {
5312 // /* HeapReference<Object> */ ref = *(obj + offset)
5313 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
5314 }
5315
5316 // Object* ref = ref_addr->AsMirrorPtr()
5317 __ MaybeUnpoisonHeapReference(ref_reg);
5318
5319 // Slow path marking the object `ref` when it is gray.
5320 SlowPathCodeMIPS64* slow_path;
5321 if (always_update_field) {
5322        // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports addresses
5323        // of the form `obj + field_offset`, where `obj` is a register and
5324        // `field_offset` is a register. Thus `offset` is expected to be zero and
5325        // `scale_factor` to be TIMES_1 in this code path.
5326 DCHECK_EQ(offset, 0u);
5327 DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
5328 slow_path = new (GetGraph()->GetArena())
5329 ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
5330 ref,
5331 obj,
5332 /* field_offset */ index,
5333 temp_reg);
5334 } else {
5335 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
5336 }
5337 AddSlowPath(slow_path);
5338
5339 // if (rb_state == ReadBarrier::GrayState())
5340 // ref = ReadBarrier::Mark(ref);
5341 // Given the numeric representation, it's enough to check the low bit of the
5342 // rb_state. We do that by shifting the bit into the sign bit (31) and
5343 // performing a branch on less than zero.
5344 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
5345 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
5346 static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
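  // As a worked example (assuming kReadBarrierStateShift is 28; the exact value
  // is not pinned down by the asserts above): Sll by 31 - 28 = 3 moves the state
  // bit into bit 31, the sign bit of the (sign-extended) 32-bit result, so Bltzc
  // branches exactly when rb_state == ReadBarrier::GrayState().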
5347 __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
5348 __ Bltzc(temp_reg, slow_path->GetEntryLabel());
5349 __ Bind(slow_path->GetExitLabel());
5350}
5351
5352void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
5353 Location out,
5354 Location ref,
5355 Location obj,
5356 uint32_t offset,
5357 Location index) {
5358 DCHECK(kEmitCompilerReadBarrier);
5359
5360 // Insert a slow path based read barrier *after* the reference load.
5361 //
5362 // If heap poisoning is enabled, the unpoisoning of the loaded
5363 // reference will be carried out by the runtime within the slow
5364 // path.
5365 //
5366 // Note that `ref` currently does not get unpoisoned (when heap
5367 // poisoning is enabled), which is alright as the `ref` argument is
5368 // not used by the artReadBarrierSlow entry point.
5369 //
5370 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5371 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena())
5372 ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
5373 AddSlowPath(slow_path);
5374
5375 __ Bc(slow_path->GetEntryLabel());
5376 __ Bind(slow_path->GetExitLabel());
5377}
5378
5379void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5380 Location out,
5381 Location ref,
5382 Location obj,
5383 uint32_t offset,
5384 Location index) {
5385 if (kEmitCompilerReadBarrier) {
5386 // Baker's read barriers shall be handled by the fast path
5387 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5388 DCHECK(!kUseBakerReadBarrier);
5389 // If heap poisoning is enabled, unpoisoning will be taken care of
5390 // by the runtime within the slow path.
5391 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5392 } else if (kPoisonHeapReferences) {
5393 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5394 }
5395}
5396
5397void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5398 Location out,
5399 Location root) {
5400 DCHECK(kEmitCompilerReadBarrier);
5401
5402 // Insert a slow path based read barrier *after* the GC root load.
5403 //
5404 // Note that GC roots are not affected by heap poisoning, so we do
5405 // not need to do anything special for this here.
5406 SlowPathCodeMIPS64* slow_path =
5407 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
5408 AddSlowPath(slow_path);
5409
5410 __ Bc(slow_path->GetEntryLabel());
5411 __ Bind(slow_path->GetExitLabel());
5412}
5413
Alexey Frunze4dda3372015-06-01 18:31:49 -07005414void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005415 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5416 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005417 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005418 switch (type_check_kind) {
5419 case TypeCheckKind::kExactCheck:
5420 case TypeCheckKind::kAbstractClassCheck:
5421 case TypeCheckKind::kClassHierarchyCheck:
5422 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08005423 call_kind =
5424 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07005425 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005426 break;
5427 case TypeCheckKind::kArrayCheck:
5428 case TypeCheckKind::kUnresolvedCheck:
5429 case TypeCheckKind::kInterfaceCheck:
5430 call_kind = LocationSummary::kCallOnSlowPath;
5431 break;
5432 }
5433
Alexey Frunze4dda3372015-06-01 18:31:49 -07005434 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005435 if (baker_read_barrier_slow_path) {
5436 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5437 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005438 locations->SetInAt(0, Location::RequiresRegister());
5439 locations->SetInAt(1, Location::RequiresRegister());
5440  // The output overlaps the inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005441 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005442 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005443 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005444}
5445
5446void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005447 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005448 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08005449 Location obj_loc = locations->InAt(0);
5450 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005451 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08005452 Location out_loc = locations->Out();
5453 GpuRegister out = out_loc.AsRegister<GpuRegister>();
5454 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
5455 DCHECK_LE(num_temps, 1u);
5456 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005457 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5458 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5459 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5460 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005461 Mips64Label done;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005462 SlowPathCodeMIPS64* slow_path = nullptr;
Alexey Frunze4dda3372015-06-01 18:31:49 -07005463
5464 // Return 0 if `obj` is null.
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005465 // Avoid this check if we know `obj` is not null.
5466 if (instruction->MustDoNullCheck()) {
5467 __ Move(out, ZERO);
5468 __ Beqzc(obj, &done);
5469 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005470
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005471 switch (type_check_kind) {
5472 case TypeCheckKind::kExactCheck: {
5473 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005474 GenerateReferenceLoadTwoRegisters(instruction,
5475 out_loc,
5476 obj_loc,
5477 class_offset,
5478 maybe_temp_loc,
5479 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005480 // Classes must be equal for the instanceof to succeed.
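      // Xor leaves zero in `out` iff the two classes are equal; Sltiu ("set on
      // less than immediate, unsigned") then materializes the boolean:
      // out = (out < 1), i.e. 1 when the classes match and 0 otherwise.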
5481 __ Xor(out, out, cls);
5482 __ Sltiu(out, out, 1);
5483 break;
5484 }
5485
5486 case TypeCheckKind::kAbstractClassCheck: {
5487 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005488 GenerateReferenceLoadTwoRegisters(instruction,
5489 out_loc,
5490 obj_loc,
5491 class_offset,
5492 maybe_temp_loc,
5493 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005494 // If the class is abstract, we eagerly fetch the super class of the
5495 // object to avoid doing a comparison we know will fail.
5496 Mips64Label loop;
5497 __ Bind(&loop);
5498 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08005499 GenerateReferenceLoadOneRegister(instruction,
5500 out_loc,
5501 super_offset,
5502 maybe_temp_loc,
5503 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005504 // If `out` is null, we use it for the result, and jump to `done`.
5505 __ Beqzc(out, &done);
5506 __ Bnec(out, cls, &loop);
5507 __ LoadConst32(out, 1);
5508 break;
5509 }
5510
5511 case TypeCheckKind::kClassHierarchyCheck: {
5512 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005513 GenerateReferenceLoadTwoRegisters(instruction,
5514 out_loc,
5515 obj_loc,
5516 class_offset,
5517 maybe_temp_loc,
5518 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005519 // Walk over the class hierarchy to find a match.
5520 Mips64Label loop, success;
5521 __ Bind(&loop);
5522 __ Beqc(out, cls, &success);
5523 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08005524 GenerateReferenceLoadOneRegister(instruction,
5525 out_loc,
5526 super_offset,
5527 maybe_temp_loc,
5528 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005529 __ Bnezc(out, &loop);
5530 // If `out` is null, we use it for the result, and jump to `done`.
5531 __ Bc(&done);
5532 __ Bind(&success);
5533 __ LoadConst32(out, 1);
5534 break;
5535 }
5536
5537 case TypeCheckKind::kArrayObjectCheck: {
5538 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005539 GenerateReferenceLoadTwoRegisters(instruction,
5540 out_loc,
5541 obj_loc,
5542 class_offset,
5543 maybe_temp_loc,
5544 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005545 // Do an exact check.
5546 Mips64Label success;
5547 __ Beqc(out, cls, &success);
5548 // Otherwise, we need to check that the object's class is a non-primitive array.
5549 // /* HeapReference<Class> */ out = out->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08005550 GenerateReferenceLoadOneRegister(instruction,
5551 out_loc,
5552 component_offset,
5553 maybe_temp_loc,
5554 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005555 // If `out` is null, we use it for the result, and jump to `done`.
5556 __ Beqzc(out, &done);
5557 __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
5558 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
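      // With kPrimNot == 0, Sltiu turns the primitive type into the result:
      // out = (primitive_type < 1), i.e. 1 for a reference (non-primitive)
      // component type and 0 otherwise.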
5559 __ Sltiu(out, out, 1);
5560 __ Bc(&done);
5561 __ Bind(&success);
5562 __ LoadConst32(out, 1);
5563 break;
5564 }
5565
5566 case TypeCheckKind::kArrayCheck: {
5567 // No read barrier since the slow path will retry upon failure.
5568 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005569 GenerateReferenceLoadTwoRegisters(instruction,
5570 out_loc,
5571 obj_loc,
5572 class_offset,
5573 maybe_temp_loc,
5574 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005575 DCHECK(locations->OnlyCallsOnSlowPath());
5576 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
5577 /* is_fatal */ false);
5578 codegen_->AddSlowPath(slow_path);
5579 __ Bnec(out, cls, slow_path->GetEntryLabel());
5580 __ LoadConst32(out, 1);
5581 break;
5582 }
5583
5584 case TypeCheckKind::kUnresolvedCheck:
5585 case TypeCheckKind::kInterfaceCheck: {
5586 // Note that we indeed only call on slow path, but we always go
5587 // into the slow path for the unresolved and interface check
5588 // cases.
5589 //
5590 // We cannot directly call the InstanceofNonTrivial runtime
5591 // entry point without resorting to a type checking slow path
5592 // here (i.e. by calling InvokeRuntime directly), as it would
5593      // require assigning fixed registers for the inputs of this
5594 // HInstanceOf instruction (following the runtime calling
5595 // convention), which might be cluttered by the potential first
5596 // read barrier emission at the beginning of this method.
5597 //
5598 // TODO: Introduce a new runtime entry point taking the object
5599 // to test (instead of its class) as argument, and let it deal
5600 // with the read barrier issues. This will let us refactor this
5601 // case of the `switch` code as it was previously (with a direct
5602 // call to the runtime not using a type checking slow path).
5603 // This should also be beneficial for the other cases above.
5604 DCHECK(locations->OnlyCallsOnSlowPath());
5605 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
5606 /* is_fatal */ false);
5607 codegen_->AddSlowPath(slow_path);
5608 __ Bc(slow_path->GetEntryLabel());
5609 break;
5610 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005611 }
5612
5613 __ Bind(&done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005614
5615 if (slow_path != nullptr) {
5616 __ Bind(slow_path->GetExitLabel());
5617 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005618}
5619
5620void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
5621 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
5622 locations->SetOut(Location::ConstantLocation(constant));
5623}
5624
5625void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
5626 // Will be generated at use site.
5627}
5628
5629void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
5630 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
5631 locations->SetOut(Location::ConstantLocation(constant));
5632}
5633
5634void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
5635 // Will be generated at use site.
5636}
5637
Calin Juravle175dc732015-08-25 15:42:32 +01005638void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5639 // The trampoline uses the same calling convention as dex calling conventions,
5640 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
5641 // the method_idx.
5642 HandleInvoke(invoke);
5643}
5644
5645void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5646 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
5647}
5648
Alexey Frunze4dda3372015-06-01 18:31:49 -07005649void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5650 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5651 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5652}
5653
5654void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
5655 HandleInvoke(invoke);
5656 // The register T0 is required to be used for the hidden argument in
5657 // art_quick_imt_conflict_trampoline, so add the hidden argument.
5658 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
5659}
5660
5661void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
5662 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
5663 GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005664 Location receiver = invoke->GetLocations()->InAt(0);
5665 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07005666 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005667
5668 // Set the hidden argument.
5669 __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
5670 invoke->GetDexMethodIndex());
5671
5672 // temp = object->GetClass();
5673 if (receiver.IsStackSlot()) {
5674 __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
5675 __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
5676 } else {
5677 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
5678 }
5679 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08005680 // Instead of simply (possibly) unpoisoning `temp` here, we should
5681 // emit a read barrier for the previous class reference load.
5682 // However this is not required in practice, as this is an
5683 // intermediate/temporary reference and because the current
5684 // concurrent copying collector keeps the from-space memory
5685  // intact/accessible until the end of the marking phase (though
5686  // it may not do so in the future).
5687 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005688 __ LoadFromOffset(kLoadDoubleword, temp, temp,
5689 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
5690 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00005691 invoke->GetImtIndex(), kMips64PointerSize));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005692 // temp = temp->GetImtEntryAt(method_offset);
5693 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
5694 // T9 = temp->GetEntryPoint();
5695 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
5696 // T9();
5697 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005698 __ Nop();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005699 DCHECK(!codegen_->IsLeafMethod());
5700 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
5701}
5702
5703void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005704 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5705 if (intrinsic.TryDispatch(invoke)) {
5706 return;
5707 }
5708
Alexey Frunze4dda3372015-06-01 18:31:49 -07005709 HandleInvoke(invoke);
5710}
5711
5712void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005713 // Explicit clinit checks triggered by static invokes must have been pruned by
5714 // art::PrepareForRegisterAllocation.
5715 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005716
Chris Larsen3039e382015-08-26 07:54:08 -07005717 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5718 if (intrinsic.TryDispatch(invoke)) {
5719 return;
5720 }
5721
Alexey Frunze4dda3372015-06-01 18:31:49 -07005722 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005723}
5724
Orion Hodsonac141392017-01-13 11:53:47 +00005725void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5726 HandleInvoke(invoke);
5727}
5728
5729void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5730 codegen_->GenerateInvokePolymorphicCall(invoke);
5731}
5732
Chris Larsen3039e382015-08-26 07:54:08 -07005733static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005734 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005735 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5736 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005737 return true;
5738 }
5739 return false;
5740}
5741
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005742HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005743 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005744 bool fallback_load = false;
5745 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005746 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005747 case HLoadString::LoadKind::kBootImageInternTable:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005748 case HLoadString::LoadKind::kBssEntry:
5749 DCHECK(!Runtime::Current()->UseJitCompilation());
5750 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005751 case HLoadString::LoadKind::kJitTableAddress:
5752 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005753 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005754 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005755 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01005756 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005757 }
5758 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005759 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005760 }
5761 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005762}
5763
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005764HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
5765 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005766 bool fallback_load = false;
5767 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005768 case HLoadClass::LoadKind::kInvalid:
5769 LOG(FATAL) << "UNREACHABLE";
5770 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005771 case HLoadClass::LoadKind::kReferrersClass:
5772 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005773 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005774 case HLoadClass::LoadKind::kBootImageClassTable:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005775 case HLoadClass::LoadKind::kBssEntry:
5776 DCHECK(!Runtime::Current()->UseJitCompilation());
5777 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005778 case HLoadClass::LoadKind::kJitTableAddress:
5779 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005780 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005781 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005782 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005783 break;
5784 }
5785 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005786 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005787 }
5788 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005789}
5790
Vladimir Markodc151b22015-10-15 18:02:30 +01005791HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
5792 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01005793 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunze19f6c692016-11-30 19:19:55 -08005794 // On MIPS64 we support all dispatch types.
5795 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01005796}
5797
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005798void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
5799 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005800 // All registers are assumed to be correctly set up per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00005801 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
Alexey Frunze19f6c692016-11-30 19:19:55 -08005802 HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
5803 HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
5804
Alexey Frunze19f6c692016-11-30 19:19:55 -08005805 switch (method_load_kind) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005806 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00005807 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005808 uint32_t offset =
5809 GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00005810 __ LoadFromOffset(kLoadDoubleword,
5811 temp.AsRegister<GpuRegister>(),
5812 TR,
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005813 offset);
Vladimir Marko58155012015-08-19 12:49:41 +00005814 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005815 }
Vladimir Marko58155012015-08-19 12:49:41 +00005816 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00005817 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00005818 break;
Vladimir Marko65979462017-05-19 17:25:12 +01005819 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
5820 DCHECK(GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005821 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko65979462017-05-19 17:25:12 +01005822 NewPcRelativeMethodPatch(invoke->GetTargetMethod());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005823 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
5824 NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
5825 EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Vladimir Marko65979462017-05-19 17:25:12 +01005826 __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
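      // The 0x5678 immediate is only a placeholder: the info_high/info_low pair
      // records the Auipc-style high instruction and this Daddiu so that both
      // can later be rewritten with the real 32-bit PC-relative address of the
      // target method (this sketches the patching scheme as used here, not the
      // patcher itself).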
5827 break;
5828 }
Vladimir Marko58155012015-08-19 12:49:41 +00005829 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
Alexey Frunze19f6c692016-11-30 19:19:55 -08005830 __ LoadLiteral(temp.AsRegister<GpuRegister>(),
5831 kLoadDoubleword,
5832 DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00005833 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005834 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005835 PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005836 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005837 PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
5838 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
5839 EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunze19f6c692016-11-30 19:19:55 -08005840 __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
5841 break;
5842 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005843 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
5844 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
5845 return; // No code pointer retrieval; the runtime performs the call directly.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005846 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005847 }
5848
Alexey Frunze19f6c692016-11-30 19:19:55 -08005849 switch (code_ptr_location) {
Vladimir Marko58155012015-08-19 12:49:41 +00005850 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Alexey Frunze19f6c692016-11-30 19:19:55 -08005851 __ Balc(&frame_entry_label_);
Vladimir Marko58155012015-08-19 12:49:41 +00005852 break;
Vladimir Marko58155012015-08-19 12:49:41 +00005853 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
5854 // T9 = callee_method->entry_point_from_quick_compiled_code_;
5855 __ LoadFromOffset(kLoadDoubleword,
5856 T9,
5857 callee_method.AsRegister<GpuRegister>(),
5858 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07005859 kMips64PointerSize).Int32Value());
Vladimir Marko58155012015-08-19 12:49:41 +00005860 // T9()
5861 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005862 __ Nop();
Vladimir Marko58155012015-08-19 12:49:41 +00005863 break;
5864 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005865 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
5866
Alexey Frunze4dda3372015-06-01 18:31:49 -07005867 DCHECK(!IsLeafMethod());
5868}
5869
5870void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005871 // Explicit clinit checks triggered by static invokes must have been pruned by
5872 // art::PrepareForRegisterAllocation.
5873 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005874
5875 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5876 return;
5877 }
5878
5879 LocationSummary* locations = invoke->GetLocations();
5880 codegen_->GenerateStaticOrDirectCall(invoke,
5881 locations->HasTemps()
5882 ? locations->GetTemp(0)
5883 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005884}
5885
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005886void CodeGeneratorMIPS64::GenerateVirtualCall(
5887 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005888 // Use the calling convention instead of the location of the receiver, as
5889 // intrinsics may have put the receiver in a different register. In the intrinsics
5890 // slow path, the arguments have been moved to the right place, so here we are
5891 // guaranteed that the receiver is the first register of the calling convention.
5892 InvokeDexCallingConvention calling_convention;
5893 GpuRegister receiver = calling_convention.GetRegisterAt(0);
5894
Alexey Frunze53afca12015-11-05 16:34:23 -08005895 GpuRegister temp = temp_location.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005896 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
5897 invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
5898 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07005899 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005900
5901 // temp = object->GetClass();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005902 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
Alexey Frunze53afca12015-11-05 16:34:23 -08005903 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08005904 // Instead of simply (possibly) unpoisoning `temp` here, we should
5905 // emit a read barrier for the previous class reference load.
5906 // However this is not required in practice, as this is an
5907 // intermediate/temporary reference and because the current
5908 // concurrent copying collector keeps the from-space memory
5909  // intact/accessible until the end of the marking phase (though
5910  // it may not do so in the future).
5911 __ MaybeUnpoisonHeapReference(temp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005912 // temp = temp->GetMethodAt(method_offset);
5913 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
5914 // T9 = temp->GetEntryPoint();
5915 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
5916 // T9();
5917 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005918 __ Nop();
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005919 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Alexey Frunze53afca12015-11-05 16:34:23 -08005920}
5921
5922void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
5923 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5924 return;
5925 }
5926
5927 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005928 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005929}
5930
5931void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00005932 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005933 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005934 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07005935 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
5936 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
Alexey Frunzef63f5692016-12-13 17:43:11 -08005937 return;
5938 }
Vladimir Marko41559982017-01-06 14:04:23 +00005939 DCHECK(!cls->NeedsAccessCheck());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005940
Alexey Frunze15958152017-02-09 19:08:30 -08005941 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5942 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Alexey Frunzef63f5692016-12-13 17:43:11 -08005943 ? LocationSummary::kCallOnSlowPath
5944 : LocationSummary::kNoCall;
5945 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005946 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
5947 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5948 }
Vladimir Marko41559982017-01-06 14:04:23 +00005949 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005950 locations->SetInAt(0, Location::RequiresRegister());
5951 }
5952 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07005953 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
5954 if (!kUseReadBarrier || kUseBakerReadBarrier) {
5955 // Rely on the type resolution or initialization and marking to save everything we need.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005956 // Request a temp to hold the BSS entry location for the slow path.
5957 locations->AddTemp(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07005958 RegisterSet caller_saves = RegisterSet::Empty();
5959 InvokeRuntimeCallingConvention calling_convention;
5960 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5961 locations->SetCustomSlowPathCallerSaves(caller_saves);
5962 } else {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005963 // For non-Baker read barriers we have a temp-clobbering call.
Alexey Frunzec61c0762017-04-10 13:54:23 -07005964 }
5965 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005966}
5967
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005968// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5969// move.
5970void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00005971 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005972 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00005973 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01005974 return;
5975 }
Vladimir Marko41559982017-01-06 14:04:23 +00005976 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01005977
Vladimir Marko41559982017-01-06 14:04:23 +00005978 LocationSummary* locations = cls->GetLocations();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005979 Location out_loc = locations->Out();
5980 GpuRegister out = out_loc.AsRegister<GpuRegister>();
5981 GpuRegister current_method_reg = ZERO;
5982 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005983 load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005984 current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
5985 }
5986
Alexey Frunze15958152017-02-09 19:08:30 -08005987 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
5988 ? kWithoutReadBarrier
5989 : kCompilerReadBarrierOption;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005990 bool generate_null_check = false;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005991 CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005992 switch (load_kind) {
5993 case HLoadClass::LoadKind::kReferrersClass:
5994 DCHECK(!cls->CanCallRuntime());
5995 DCHECK(!cls->MustGenerateClinitCheck());
5996 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5997 GenerateGcRootFieldLoad(cls,
5998 out_loc,
5999 current_method_reg,
Alexey Frunze15958152017-02-09 19:08:30 -08006000 ArtMethod::DeclaringClassOffset().Int32Value(),
6001 read_barrier_option);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006002 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006003 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006004 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze15958152017-02-09 19:08:30 -08006005 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006006 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Alexey Frunzef63f5692016-12-13 17:43:11 -08006007 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006008 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6009 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
6010 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006011 __ Daddiu(out, AT, /* placeholder */ 0x5678);
6012 break;
6013 }
6014 case HLoadClass::LoadKind::kBootImageAddress: {
Alexey Frunze15958152017-02-09 19:08:30 -08006015 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006016 uint32_t address = dchecked_integral_cast<uint32_t>(
6017 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
6018 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006019 __ LoadLiteral(out,
6020 kLoadUnsignedWord,
6021 codegen_->DeduplicateBootImageAddressLiteral(address));
6022 break;
6023 }
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006024 case HLoadClass::LoadKind::kBootImageClassTable: {
6025 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6026 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6027 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
6028 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6029 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
6030 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6031 __ Lwu(out, AT, /* placeholder */ 0x5678);
6032 // Extract the reference from the slot data, i.e. clear the hash bits.
6033 int32_t masked_hash = ClassTable::TableSlot::MaskHash(
6034 ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
6035 if (masked_hash != 0) {
6036 __ Daddiu(out, out, -masked_hash);
6037 }
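      // This subtraction works because the table slot stores the reference with
      // masked_hash folded into its low (alignment) bits, which are zero in the
      // reference itself: the OR that formed the slot behaves like an addition,
      // so subtracting the statically known masked_hash restores the plain
      // reference.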
6038 break;
6039 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006040 case HLoadClass::LoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006041 bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
6042 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6043 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
6044 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
6045 GpuRegister temp = non_baker_read_barrier
6046 ? out
6047 : locations->GetTemp(0).AsRegister<GpuRegister>();
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006048 codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, temp);
6049 GenerateGcRootFieldLoad(cls,
6050 out_loc,
6051 temp,
6052 /* placeholder */ 0x5678,
6053 read_barrier_option,
6054 &info_low->label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006055 generate_null_check = true;
6056 break;
6057 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006058 case HLoadClass::LoadKind::kJitTableAddress:
6059 __ LoadLiteral(out,
6060 kLoadUnsignedWord,
6061 codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
6062 cls->GetTypeIndex(),
6063 cls->GetClass()));
Alexey Frunze15958152017-02-09 19:08:30 -08006064 GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006065 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006066 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006067 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00006068 LOG(FATAL) << "UNREACHABLE";
6069 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08006070 }
6071
6072 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6073 DCHECK(cls->CanCallRuntime());
6074 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006075 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006076 codegen_->AddSlowPath(slow_path);
6077 if (generate_null_check) {
6078 __ Beqzc(out, slow_path->GetEntryLabel());
6079 }
6080 if (cls->MustGenerateClinitCheck()) {
6081 GenerateClassInitializationCheck(slow_path, out);
6082 } else {
6083 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006084 }
6085 }
6086}
6087
David Brazdilcb1c0552015-08-04 16:22:25 +01006088static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006089 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01006090}
6091
Alexey Frunze4dda3372015-06-01 18:31:49 -07006092void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6093 LocationSummary* locations =
6094 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
6095 locations->SetOut(Location::RequiresRegister());
6096}
6097
6098void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6099 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006100 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6101}
6102
6103void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
6104 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
6105}
6106
6107void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6108 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006109}
6110
Alexey Frunze4dda3372015-06-01 18:31:49 -07006111void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006112 HLoadString::LoadKind load_kind = load->GetLoadKind();
6113 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00006114 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006115 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006116 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006117 locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Alexey Frunzef63f5692016-12-13 17:43:11 -08006118 } else {
6119 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006120 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6121 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6122 // Rely on the pResolveString entrypoint and on marking to save everything we need.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006123 // Request a temp to hold the BSS entry location for the slow path.
6124 locations->AddTemp(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006125 RegisterSet caller_saves = RegisterSet::Empty();
6126 InvokeRuntimeCallingConvention calling_convention;
6127 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6128 locations->SetCustomSlowPathCallerSaves(caller_saves);
6129 } else {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006130 // For non-Baker read barriers we have a temp-clobbering call.
Alexey Frunzec61c0762017-04-10 13:54:23 -07006131 }
6132 }
Alexey Frunzef63f5692016-12-13 17:43:11 -08006133 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006134}
6135
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006136// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6137// move.
6138void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006139 HLoadString::LoadKind load_kind = load->GetLoadKind();
6140 LocationSummary* locations = load->GetLocations();
6141 Location out_loc = locations->Out();
6142 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6143
6144 switch (load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006145 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
6146 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006147 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006148 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006149 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6150 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
6151 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006152 __ Daddiu(out, AT, /* placeholder */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006153 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006154 }
6155 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006156 uint32_t address = dchecked_integral_cast<uint32_t>(
6157 reinterpret_cast<uintptr_t>(load->GetString().Get()));
6158 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006159 __ LoadLiteral(out,
6160 kLoadUnsignedWord,
6161 codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006162 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006163 }
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006164 case HLoadString::LoadKind::kBootImageInternTable: {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006165 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006166 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006167 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006168 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6169 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006170 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6171 __ Lwu(out, AT, /* placeholder */ 0x5678);
6172 return;
6173 }
6174 case HLoadString::LoadKind::kBssEntry: {
6175 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6176 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6177 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
6178 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6179 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006180 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
6181 GpuRegister temp = non_baker_read_barrier
6182 ? out
6183 : locations->GetTemp(0).AsRegister<GpuRegister>();
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006184 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, temp);
Alexey Frunze15958152017-02-09 19:08:30 -08006185 GenerateGcRootFieldLoad(load,
6186 out_loc,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006187 temp,
Alexey Frunze15958152017-02-09 19:08:30 -08006188 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006189 kCompilerReadBarrierOption,
6190 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006191 SlowPathCodeMIPS64* slow_path =
6192 new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load, info_high);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006193 codegen_->AddSlowPath(slow_path);
6194 __ Beqzc(out, slow_path->GetEntryLabel());
6195 __ Bind(slow_path->GetExitLabel());
6196 return;
6197 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006198 case HLoadString::LoadKind::kJitTableAddress:
6199 __ LoadLiteral(out,
6200 kLoadUnsignedWord,
6201 codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
6202 load->GetStringIndex(),
6203 load->GetString()));
Alexey Frunze15958152017-02-09 19:08:30 -08006204 GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08006205 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006206 default:
6207 break;
6208 }
6209
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006210 // TODO: Re-add the compiler code to do the string dex cache lookup.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006211 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006212 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006213 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006214 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
6215 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6216 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006217}
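
// Illustrative sketch (assumption, not part of the original file): the
// kBssEntry path above becomes a PC-relative GC root load plus a
// lazy-resolution check, roughly
//
//     auipc temp, %hi(string BSS entry)       # info_high placeholder
//     lwu   out, %lo(string BSS entry)(temp)  # info_low placeholder (0x5678)
//     beqzc out, <LoadStringSlowPathMIPS64>   # resolve on first use
//   exit:
//
// After the slow path has called kQuickResolveString once, the BSS entry
// holds the String reference and subsequent executions never branch.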
6218
Alexey Frunze4dda3372015-06-01 18:31:49 -07006219void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
6220 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6221 locations->SetOut(Location::ConstantLocation(constant));
6222}
6223
6224void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
6225 // Will be generated at use site.
6226}
6227
6228void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
6229 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006230 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006231 InvokeRuntimeCallingConvention calling_convention;
6232 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6233}
6234
6235void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006236 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006237 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006238 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006239 if (instruction->IsEnter()) {
6240 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6241 } else {
6242 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6243 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006244}
6245
6246void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6247 LocationSummary* locations =
6248 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
6249 switch (mul->GetResultType()) {
6250 case Primitive::kPrimInt:
6251 case Primitive::kPrimLong:
6252 locations->SetInAt(0, Location::RequiresRegister());
6253 locations->SetInAt(1, Location::RequiresRegister());
6254 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6255 break;
6256
6257 case Primitive::kPrimFloat:
6258 case Primitive::kPrimDouble:
6259 locations->SetInAt(0, Location::RequiresFpuRegister());
6260 locations->SetInAt(1, Location::RequiresFpuRegister());
6261 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6262 break;
6263
6264 default:
6265 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6266 }
6267}
6268
6269void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
6270 Primitive::Type type = instruction->GetType();
6271 LocationSummary* locations = instruction->GetLocations();
6272
6273 switch (type) {
6274 case Primitive::kPrimInt:
6275 case Primitive::kPrimLong: {
6276 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6277 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6278 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
6279 if (type == Primitive::kPrimInt)
6280 __ MulR6(dst, lhs, rhs);
6281 else
6282 __ Dmul(dst, lhs, rhs);
6283 break;
6284 }
6285 case Primitive::kPrimFloat:
6286 case Primitive::kPrimDouble: {
6287 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6288 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6289 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
6290 if (type == Primitive::kPrimFloat)
6291 __ MulS(dst, lhs, rhs);
6292 else
6293 __ MulD(dst, lhs, rhs);
6294 break;
6295 }
6296 default:
6297 LOG(FATAL) << "Unexpected mul type " << type;
6298 }
6299}
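
// For illustration (not part of the original file), the cases above map
// one-to-one onto MIPS64r6 instructions:
//
//     mul   dst, lhs, rhs   # int: low 32 bits of the product
//     dmul  dst, lhs, rhs   # long: low 64 bits of the product
//     mul.s fd, fs, ft      # float
//     mul.d fd, fs, ft      # double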
6300
6301void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6302 LocationSummary* locations =
6303 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
6304 switch (neg->GetResultType()) {
6305 case Primitive::kPrimInt:
6306 case Primitive::kPrimLong:
6307 locations->SetInAt(0, Location::RequiresRegister());
6308 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6309 break;
6310
6311 case Primitive::kPrimFloat:
6312 case Primitive::kPrimDouble:
6313 locations->SetInAt(0, Location::RequiresFpuRegister());
6314 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6315 break;
6316
6317 default:
6318 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6319 }
6320}
6321
6322void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
6323 Primitive::Type type = instruction->GetType();
6324 LocationSummary* locations = instruction->GetLocations();
6325
6326 switch (type) {
6327 case Primitive::kPrimInt:
6328 case Primitive::kPrimLong: {
6329 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6330 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6331 if (type == Primitive::kPrimInt)
6332 __ Subu(dst, ZERO, src);
6333 else
6334 __ Dsubu(dst, ZERO, src);
6335 break;
6336 }
6337 case Primitive::kPrimFloat:
6338 case Primitive::kPrimDouble: {
6339 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6340 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
6341 if (type == Primitive::kPrimFloat)
6342 __ NegS(dst, src);
6343 else
6344 __ NegD(dst, src);
6345 break;
6346 }
6347 default:
6348 LOG(FATAL) << "Unexpected neg type " << type;
6349 }
6350}
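
// Illustration only: integer negation has no dedicated MIPS instruction and
// is emitted as a subtraction from the hard-wired zero register, while the
// FPU negates directly:
//
//     subu  dst, zero, src   # int
//     dsubu dst, zero, src   # long
//     neg.s fd, fs           # float
//     neg.d fd, fs           # double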
6351
6352void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
6353 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006354 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006355 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006356 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006357 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6358 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006359}
6360
6361void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006362 // Note: if heap poisoning is enabled, the entry point takes care
6363 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006364 QuickEntrypointEnum entrypoint =
6365 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6366 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006367 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006368 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006369}
6370
6371void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
6372 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006373 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006374 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00006375 if (instruction->IsStringAlloc()) {
6376 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
6377 } else {
6378 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00006379 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006380 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
6381}
6382
6383void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006384 // Note: if heap poisoning is enabled, the entry point takes care
6385 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00006386 if (instruction->IsStringAlloc()) {
6387 // String is allocated through StringFactory. Call NewEmptyString entry point.
6388 GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
Lazar Trsicd9672662015-09-03 17:33:01 +02006389 MemberOffset code_offset =
Andreas Gampe542451c2016-07-26 09:02:02 -07006390 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00006391 __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
6392 __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
6393 __ Jalr(T9);
6394 __ Nop();
6395 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
6396 } else {
Serban Constantinescufc734082016-07-19 17:18:07 +01006397 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00006398 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00006399 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006400}
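
// A sketch of the StringAlloc path above (illustration only): the
// pNewEmptyString slot in the Thread object holds the ArtMethod* of the
// StringFactory method, so the call goes through its quick code entry point:
//
//     ld   temp, <pNewEmptyString>(TR)
//     ld   t9, <entry point from quick compiled code>(temp)
//     jalr t9        # call through T9, per the MIPS PIC convention
//     nop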
6401
6402void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
6403 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
6404 locations->SetInAt(0, Location::RequiresRegister());
6405 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6406}
6407
6408void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
6409 Primitive::Type type = instruction->GetType();
6410 LocationSummary* locations = instruction->GetLocations();
6411
6412 switch (type) {
6413 case Primitive::kPrimInt:
6414 case Primitive::kPrimLong: {
6415 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6416 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6417 __ Nor(dst, src, ZERO);
6418 break;
6419 }
6420
6421 default:
6422 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6423 }
6424}
6425
6426void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6427 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
6428 locations->SetInAt(0, Location::RequiresRegister());
6429 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6430}
6431
6432void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6433 LocationSummary* locations = instruction->GetLocations();
6434 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6435 locations->InAt(0).AsRegister<GpuRegister>(),
6436 1);
6437}
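
// Illustration only: HBooleanNot relies on booleans being materialized as
// 0 or 1, so flipping the low bit implements logical negation in a single
// instruction:
//
//     xori out, in, 1   # 0 -> 1, 1 -> 0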
6438
6439void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006440 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6441 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006442}
6443
Calin Juravle2ae48182016-03-16 14:05:09 +00006444void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
6445 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006446 return;
6447 }
6448 Location obj = instruction->GetLocations()->InAt(0);
6449
6450 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00006451 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006452}
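
// Illustration only: the implicit null check is a single faulting load into
// the zero register,
//
//     lw zero, 0(obj)
//
// If `obj` is null the load raises SIGSEGV, and the runtime's fault handler
// uses the PC recorded by RecordPcInfo above to throw NullPointerException.
// The common non-null case thus costs one instruction and no branch.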
6453
Calin Juravle2ae48182016-03-16 14:05:09 +00006454void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006455 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006456 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006457
6458 Location obj = instruction->GetLocations()->InAt(0);
6459
6460 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
6461}
6462
6463void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00006464 codegen_->GenerateNullCheck(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006465}
6466
6467void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
6468 HandleBinaryOp(instruction);
6469}
6470
6471void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
6472 HandleBinaryOp(instruction);
6473}
6474
6475void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
6476 LOG(FATAL) << "Unreachable";
6477}
6478
6479void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
6480 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
6481}
6482
6483void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
6484 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
6485 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6486 if (location.IsStackSlot()) {
6487 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6488 } else if (location.IsDoubleStackSlot()) {
6489 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6490 }
6491 locations->SetOut(location);
6492}
6493
6494void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
6495 ATTRIBUTE_UNUSED) {
6496 // Nothing to do, the parameter is already at its location.
6497}
6498
6499void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
6500 LocationSummary* locations =
6501 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6502 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
6503}
6504
6505void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
6506 ATTRIBUTE_UNUSED) {
6507 // Nothing to do, the method is already at its location.
6508}
6509
6510void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
6511 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01006512 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006513 locations->SetInAt(i, Location::Any());
6514 }
6515 locations->SetOut(Location::Any());
6516}
6517
6518void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
6519 LOG(FATAL) << "Unreachable";
6520}
6521
6522void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
6523 Primitive::Type type = rem->GetResultType();
6524 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006525 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6526 : LocationSummary::kNoCall;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006527 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
6528
6529 switch (type) {
6530 case Primitive::kPrimInt:
6531 case Primitive::kPrimLong:
6532 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006533 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006534 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6535 break;
6536
6537 case Primitive::kPrimFloat:
6538 case Primitive::kPrimDouble: {
6539 InvokeRuntimeCallingConvention calling_convention;
6540 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6541 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6542 locations->SetOut(calling_convention.GetReturnLocation(type));
6543 break;
6544 }
6545
6546 default:
6547 LOG(FATAL) << "Unexpected rem type " << type;
6548 }
6549}
6550
6551void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
6552 Primitive::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006553
6554 switch (type) {
6555 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07006556 case Primitive::kPrimLong:
6557 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006558 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006559
6560 case Primitive::kPrimFloat:
6561 case Primitive::kPrimDouble: {
Serban Constantinescufc734082016-07-19 17:18:07 +01006562 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
6563 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006564 if (type == Primitive::kPrimFloat) {
6565 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6566 } else {
6567 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6568 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006569 break;
6570 }
6571 default:
6572 LOG(FATAL) << "Unexpected rem type " << type;
6573 }
6574}
6575
Igor Murashkind01745e2017-04-05 16:40:31 -07006576void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
6577 constructor_fence->SetLocations(nullptr);
6578}
6579
6580void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
6581 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
6582 GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
6583}
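
// Note (an assumption about this generator, for illustration): finer SYNC
// stype variants are not relied upon here, so a kStoreStore constructor
// fence is emitted conservatively as a full barrier:
//
//     sync   # stype 0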
6584
Alexey Frunze4dda3372015-06-01 18:31:49 -07006585void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
6586 memory_barrier->SetLocations(nullptr);
6587}
6588
6589void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
6590 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
6591}
6592
6593void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
6594 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
6595 Primitive::Type return_type = ret->InputAt(0)->GetType();
6596 locations->SetInAt(0, Mips64ReturnLocation(return_type));
6597}
6598
6599void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
6600 codegen_->GenerateFrameExit();
6601}
6602
6603void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
6604 ret->SetLocations(nullptr);
6605}
6606
6607void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
6608 codegen_->GenerateFrameExit();
6609}
6610
Alexey Frunze92d90602015-12-18 18:16:36 -08006611void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
6612 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00006613}
6614
Alexey Frunze92d90602015-12-18 18:16:36 -08006615void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
6616 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00006617}
6618
Alexey Frunze4dda3372015-06-01 18:31:49 -07006619void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
6620 HandleShift(shl);
6621}
6622
6623void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
6624 HandleShift(shl);
6625}
6626
6627void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
6628 HandleShift(shr);
6629}
6630
6631void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
6632 HandleShift(shr);
6633}
6634
Alexey Frunze4dda3372015-06-01 18:31:49 -07006635void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
6636 HandleBinaryOp(instruction);
6637}
6638
6639void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
6640 HandleBinaryOp(instruction);
6641}
6642
6643void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
6644 HandleFieldGet(instruction, instruction->GetFieldInfo());
6645}
6646
6647void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
6648 HandleFieldGet(instruction, instruction->GetFieldInfo());
6649}
6650
6651void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
6652 HandleFieldSet(instruction, instruction->GetFieldInfo());
6653}
6654
6655void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01006656 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006657}
6658
Calin Juravlee460d1d2015-09-29 04:52:17 +01006659void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
6660 HUnresolvedInstanceFieldGet* instruction) {
6661 FieldAccessCallingConventionMIPS64 calling_convention;
6662 codegen_->CreateUnresolvedFieldLocationSummary(
6663 instruction, instruction->GetFieldType(), calling_convention);
6664}
6665
6666void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
6667 HUnresolvedInstanceFieldGet* instruction) {
6668 FieldAccessCallingConventionMIPS64 calling_convention;
6669 codegen_->GenerateUnresolvedFieldAccess(instruction,
6670 instruction->GetFieldType(),
6671 instruction->GetFieldIndex(),
6672 instruction->GetDexPc(),
6673 calling_convention);
6674}
6675
6676void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
6677 HUnresolvedInstanceFieldSet* instruction) {
6678 FieldAccessCallingConventionMIPS64 calling_convention;
6679 codegen_->CreateUnresolvedFieldLocationSummary(
6680 instruction, instruction->GetFieldType(), calling_convention);
6681}
6682
6683void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
6684 HUnresolvedInstanceFieldSet* instruction) {
6685 FieldAccessCallingConventionMIPS64 calling_convention;
6686 codegen_->GenerateUnresolvedFieldAccess(instruction,
6687 instruction->GetFieldType(),
6688 instruction->GetFieldIndex(),
6689 instruction->GetDexPc(),
6690 calling_convention);
6691}
6692
6693void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
6694 HUnresolvedStaticFieldGet* instruction) {
6695 FieldAccessCallingConventionMIPS64 calling_convention;
6696 codegen_->CreateUnresolvedFieldLocationSummary(
6697 instruction, instruction->GetFieldType(), calling_convention);
6698}
6699
6700void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
6701 HUnresolvedStaticFieldGet* instruction) {
6702 FieldAccessCallingConventionMIPS64 calling_convention;
6703 codegen_->GenerateUnresolvedFieldAccess(instruction,
6704 instruction->GetFieldType(),
6705 instruction->GetFieldIndex(),
6706 instruction->GetDexPc(),
6707 calling_convention);
6708}
6709
6710void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
6711 HUnresolvedStaticFieldSet* instruction) {
6712 FieldAccessCallingConventionMIPS64 calling_convention;
6713 codegen_->CreateUnresolvedFieldLocationSummary(
6714 instruction, instruction->GetFieldType(), calling_convention);
6715}
6716
6717void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
6718 HUnresolvedStaticFieldSet* instruction) {
6719 FieldAccessCallingConventionMIPS64 calling_convention;
6720 codegen_->GenerateUnresolvedFieldAccess(instruction,
6721 instruction->GetFieldType(),
6722 instruction->GetFieldIndex(),
6723 instruction->GetDexPc(),
6724 calling_convention);
6725}
6726
Alexey Frunze4dda3372015-06-01 18:31:49 -07006727void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01006728 LocationSummary* locations =
6729 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02006730 // In suspend check slow path, usually there are no caller-save registers at all.
6731 // If SIMD instructions are present, however, we force spilling all live SIMD
6732 // registers in full width (since the runtime only saves/restores lower part).
6733 locations->SetCustomSlowPathCallerSaves(
6734 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006735}
6736
6737void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
6738 HBasicBlock* block = instruction->GetBlock();
6739 if (block->GetLoopInformation() != nullptr) {
6740 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6741 // The back edge will generate the suspend check.
6742 return;
6743 }
6744 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6745 // The goto will generate the suspend check.
6746 return;
6747 }
6748 GenerateSuspendCheck(instruction, nullptr);
6749}
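
// Illustrative sketch (not part of the original file): when the check is not
// elided, GenerateSuspendCheck tests the thread's flags and calls into the
// runtime via a slow path, roughly
//
//     lhu   tmp, <ThreadFlagsOffset>(TR)
//     bnezc tmp, <SuspendCheckSlowPathMIPS64>   # any pending flag suspends
//   return: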
6750
Alexey Frunze4dda3372015-06-01 18:31:49 -07006751void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
6752 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006753 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006754 InvokeRuntimeCallingConvention calling_convention;
6755 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6756}
6757
6758void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006759 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006760 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
6761}
6762
6763void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
6764 Primitive::Type input_type = conversion->GetInputType();
6765 Primitive::Type result_type = conversion->GetResultType();
6766 DCHECK_NE(input_type, result_type);
6767
6768 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
6769 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
6770 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6771 }
6772
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006773 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
6774
6775 if (Primitive::IsFloatingPointType(input_type)) {
6776 locations->SetInAt(0, Location::RequiresFpuRegister());
6777 } else {
6778 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006779 }
6780
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006781 if (Primitive::IsFloatingPointType(result_type)) {
6782 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006783 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006784 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006785 }
6786}
6787
6788void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
6789 LocationSummary* locations = conversion->GetLocations();
6790 Primitive::Type result_type = conversion->GetResultType();
6791 Primitive::Type input_type = conversion->GetInputType();
6792
6793 DCHECK_NE(input_type, result_type);
6794
6795 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
6796 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6797 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6798
6799 switch (result_type) {
6800 case Primitive::kPrimChar:
6801 __ Andi(dst, src, 0xFFFF);
6802 break;
6803 case Primitive::kPrimByte:
Vladimir Markob52bbde2016-02-12 12:06:05 +00006804 if (input_type == Primitive::kPrimLong) {
6805 // Type conversion from long to types narrower than int is a result of code
6806 // transformations. To avoid unpredictable results for SEB and SEH, we first
6807 // need to sign-extend the low 32-bit value into bits 32 through 63.
6808 __ Sll(dst, src, 0);
6809 __ Seb(dst, dst);
6810 } else {
6811 __ Seb(dst, src);
6812 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006813 break;
6814 case Primitive::kPrimShort:
Vladimir Markob52bbde2016-02-12 12:06:05 +00006815 if (input_type == Primitive::kPrimLong) {
6816 // Type conversion from long to types narrower than int is a result of code
6817 // transformations. To avoid unpredictable results for SEB and SEH, we first
6818 // need to sign-extend the low 32-bit value into bits 32 through 63.
6819 __ Sll(dst, src, 0);
6820 __ Seh(dst, dst);
6821 } else {
6822 __ Seh(dst, src);
6823 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006824 break;
6825 case Primitive::kPrimInt:
6826 case Primitive::kPrimLong:
Goran Jakovljevic992bdb92016-12-28 16:21:48 +01006827 // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
6828 // conversions, except when the input and output registers are the same and we are not
6829 // converting longs to shorter types. In these cases, do nothing.
6830 if ((input_type == Primitive::kPrimLong) || (dst != src)) {
6831 __ Sll(dst, src, 0);
6832 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006833 break;
6834
6835 default:
6836 LOG(FATAL) << "Unexpected type conversion from " << input_type
6837 << " to " << result_type;
6838 }
6839 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006840 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6841 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6842 if (input_type == Primitive::kPrimLong) {
6843 __ Dmtc1(src, FTMP);
6844 if (result_type == Primitive::kPrimFloat) {
6845 __ Cvtsl(dst, FTMP);
6846 } else {
6847 __ Cvtdl(dst, FTMP);
6848 }
6849 } else {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006850 __ Mtc1(src, FTMP);
6851 if (result_type == Primitive::kPrimFloat) {
6852 __ Cvtsw(dst, FTMP);
6853 } else {
6854 __ Cvtdw(dst, FTMP);
6855 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006856 }
6857 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
6858 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006859 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6860 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006861
6862 if (result_type == Primitive::kPrimLong) {
Roland Levillain888d0672015-11-23 18:53:50 +00006863 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006864 __ TruncLS(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006865 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006866 __ TruncLD(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006867 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006868 __ Dmfc1(dst, FTMP);
Roland Levillain888d0672015-11-23 18:53:50 +00006869 } else {
6870 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006871 __ TruncWS(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006872 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006873 __ TruncWD(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006874 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006875 __ Mfc1(dst, FTMP);
Roland Levillain888d0672015-11-23 18:53:50 +00006876 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006877 } else if (Primitive::IsFloatingPointType(result_type) &&
6878 Primitive::IsFloatingPointType(input_type)) {
6879 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6880 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
6881 if (result_type == Primitive::kPrimFloat) {
6882 __ Cvtsd(dst, src);
6883 } else {
6884 __ Cvtds(dst, src);
6885 }
6886 } else {
6887 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
6888 << " to " << result_type;
6889 }
6890}
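
// Illustration only: the sign-extension subtleties above follow from MIPS64
// keeping 32-bit values sign-extended to 64 bits in registers. For example,
// a long-to-byte conversion lowers to
//
//     sll dst, src, 0   # truncate to 32 bits, sign-extend into bits 32..63
//     seb dst, dst      # then sign-extend the low byte
//
// and a float-to-long conversion to
//
//     trunc.l.s ftmp, src
//     dmfc1     dst, ftmp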
6891
6892void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
6893 HandleShift(ushr);
6894}
6895
6896void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
6897 HandleShift(ushr);
6898}
6899
6900void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
6901 HandleBinaryOp(instruction);
6902}
6903
6904void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
6905 HandleBinaryOp(instruction);
6906}
6907
6908void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
6909 // Nothing to do, this should be removed during prepare for register allocator.
6910 LOG(FATAL) << "Unreachable";
6911}
6912
6913void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
6914 // Nothing to do, this should be removed during prepare for register allocator.
6915 LOG(FATAL) << "Unreachable";
6916}
6917
6918void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006919 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006920}
6921
6922void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006923 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006924}
6925
6926void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006927 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006928}
6929
6930void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006931 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006932}
6933
6934void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006935 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006936}
6937
6938void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006939 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006940}
6941
6942void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006943 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006944}
6945
6946void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006947 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006948}
6949
6950void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006951 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006952}
6953
6954void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006955 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006956}
6957
6958void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006959 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006960}
6961
6962void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006963 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006964}
6965
Aart Bike9f37602015-10-09 11:15:55 -07006966void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006967 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006968}
6969
6970void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006971 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006972}
6973
6974void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006975 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006976}
6977
6978void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006979 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006980}
6981
6982void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006983 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006984}
6985
6986void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006987 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006988}
6989
6990void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006991 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006992}
6993
6994void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006995 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006996}
6997
Mark Mendellfe57faa2015-09-18 09:26:15 -04006998// Simple implementation of packed switch - generate cascaded compare/jumps.
6999void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7000 LocationSummary* locations =
7001 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
7002 locations->SetInAt(0, Location::RequiresRegister());
7003}
7004
Alexey Frunze0960ac52016-12-20 17:24:59 -08007005void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
7006 int32_t lower_bound,
7007 uint32_t num_entries,
7008 HBasicBlock* switch_block,
7009 HBasicBlock* default_block) {
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007010 // Create a set of compare/jumps.
7011 GpuRegister temp_reg = TMP;
Alexey Frunze0960ac52016-12-20 17:24:59 -08007012 __ Addiu32(temp_reg, value_reg, -lower_bound);
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007013 // Jump to the default block if the index is negative.
7014 // Note: We don't check the case where the index is positive while value < lower_bound, because
7015 // in that case index >= num_entries must hold, which saves us one branch instruction.
7016 __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));
7017
Alexey Frunze0960ac52016-12-20 17:24:59 -08007018 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007019 // Jump to successors[0] if value == lower_bound.
7020 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
7021 int32_t last_index = 0;
7022 for (; num_entries - last_index > 2; last_index += 2) {
7023 __ Addiu(temp_reg, temp_reg, -2);
7024 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
7025 __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
7026 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
7027 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
7028 }
7029 if (num_entries - last_index == 2) {
7030 // The last missing case_value.
7031 __ Addiu(temp_reg, temp_reg, -1);
7032 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007033 }
7034
7035 // And the default for any other value.
Alexey Frunze0960ac52016-12-20 17:24:59 -08007036 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07007037 __ Bc(codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007038 }
7039}
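
// Worked example (illustration only): for lower_bound == 10 and
// num_entries == 4, the cascade above emits roughly
//
//     addiu tmp, value, -10   # tmp = value - lower_bound
//     bltzc tmp, default      # value < 10
//     beqzc tmp, case_10      # value == 10
//     addiu tmp, tmp, -2
//     bltzc tmp, case_11      # value == 11
//     beqzc tmp, case_12      # value == 12
//     addiu tmp, tmp, -1
//     beqzc tmp, case_13      # value == 13
//     bc    default           # value > 13, unless default is the next block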
7040
Alexey Frunze0960ac52016-12-20 17:24:59 -08007041void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
7042 int32_t lower_bound,
7043 uint32_t num_entries,
7044 HBasicBlock* switch_block,
7045 HBasicBlock* default_block) {
7046 // Create a jump table.
7047 std::vector<Mips64Label*> labels(num_entries);
7048 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
7049 for (uint32_t i = 0; i < num_entries; i++) {
7050 labels[i] = codegen_->GetLabelOf(successors[i]);
7051 }
7052 JumpTable* table = __ CreateJumpTable(std::move(labels));
7053
7054 // Is the value in range?
7055 __ Addiu32(TMP, value_reg, -lower_bound);
7056 __ LoadConst32(AT, num_entries);
7057 __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));
7058
7059 // We are in the range of the table.
7060 // Load the target address from the jump table, indexing by the value.
7061 __ LoadLabelAddress(AT, table->GetLabel());
Chris Larsencd0295d2017-03-31 15:26:54 -07007062 __ Dlsa(TMP, TMP, AT, 2);
Alexey Frunze0960ac52016-12-20 17:24:59 -08007063 __ Lw(TMP, TMP, 0);
7064 // Compute the absolute target address by adding the table start address
7065 // (the table contains offsets to targets relative to its start).
7066 __ Daddu(TMP, TMP, AT);
7067 // And jump.
7068 __ Jr(TMP);
7069 __ Nop();
7070}
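
// Illustration only: the emitted dispatch sequence is roughly
//
//     addiu tmp, value, -lower_bound
//     li    at, num_entries
//     bgeuc tmp, at, default    # unsigned compare also rejects value < lower_bound
//     <load address of jump table into at>
//     dlsa  tmp, tmp, at, 2     # at + 4 * index
//     lw    tmp, 0(tmp)         # 32-bit offset relative to the table start
//     daddu tmp, tmp, at        # absolute target address
//     jr    tmp
//     nop
//
// Storing 32-bit offsets instead of absolute addresses keeps the table
// position-independent and half the size.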
7071
7072void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7073 int32_t lower_bound = switch_instr->GetStartValue();
7074 uint32_t num_entries = switch_instr->GetNumEntries();
7075 LocationSummary* locations = switch_instr->GetLocations();
7076 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7077 HBasicBlock* switch_block = switch_instr->GetBlock();
7078 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7079
7080 if (num_entries > kPackedSwitchJumpTableThreshold) {
7081 GenTableBasedPackedSwitch(value_reg,
7082 lower_bound,
7083 num_entries,
7084 switch_block,
7085 default_block);
7086 } else {
7087 GenPackedSwitchWithCompares(value_reg,
7088 lower_bound,
7089 num_entries,
7090 switch_block,
7091 default_block);
7092 }
7093}
7094
Chris Larsenc9905a62017-03-13 17:06:18 -07007095void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7096 LocationSummary* locations =
7097 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
7098 locations->SetInAt(0, Location::RequiresRegister());
7099 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007100}
7101
Chris Larsenc9905a62017-03-13 17:06:18 -07007102void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7103 LocationSummary* locations = instruction->GetLocations();
7104 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
7105 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7106 instruction->GetIndex(), kMips64PointerSize).SizeValue();
7107 __ LoadFromOffset(kLoadDoubleword,
7108 locations->Out().AsRegister<GpuRegister>(),
7109 locations->InAt(0).AsRegister<GpuRegister>(),
7110 method_offset);
7111 } else {
7112 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
7113 instruction->GetIndex(), kMips64PointerSize));
7114 __ LoadFromOffset(kLoadDoubleword,
7115 locations->Out().AsRegister<GpuRegister>(),
7116 locations->InAt(0).AsRegister<GpuRegister>(),
7117 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
7118 __ LoadFromOffset(kLoadDoubleword,
7119 locations->Out().AsRegister<GpuRegister>(),
7120 locations->Out().AsRegister<GpuRegister>(),
7121 method_offset);
7122 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007123}
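
// Illustration only: both cases above are pointer-sized loads from the Class
// object. A virtual method comes straight from the embedded vtable, while an
// interface method is reached through the IMT pointer:
//
//     ld out, <EmbeddedVTableEntryOffset(index)>(klass)   # kVTable
//
//     ld out, <ImtPtrOffset>(klass)                       # interface table
//     ld out, <OffsetOfElement(index)>(out)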
7124
Alexey Frunze4dda3372015-06-01 18:31:49 -07007125} // namespace mips64
7126} // namespace art