blob: 5fb8755086fdb16b82623301f4ad4ed1b4548979 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunzec857c742015-09-23 15:12:39 -070019#include "art_method.h"
20#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080021#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070022#include "entrypoints/quick/quick_entrypoints.h"
23#include "entrypoints/quick/quick_entrypoints_enum.h"
24#include "gc/accounting/card_table.h"
25#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070026#include "intrinsics_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070027#include "mirror/array-inl.h"
28#include "mirror/class-inl.h"
29#include "offsets.h"
30#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070032#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070033#include "utils/stack_checks.h"
34
namespace art {
namespace mips64 {

// Stack offset at which the current ArtMethod* is stored in the frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry (first GP argument register).
static constexpr GpuRegister kMethodRegisterArgument = A0;
40
Alexey Frunze4dda3372015-06-01 18:31:49 -070041Location Mips64ReturnLocation(Primitive::Type return_type) {
42 switch (return_type) {
43 case Primitive::kPrimBoolean:
44 case Primitive::kPrimByte:
45 case Primitive::kPrimChar:
46 case Primitive::kPrimShort:
47 case Primitive::kPrimInt:
48 case Primitive::kPrimNot:
49 case Primitive::kPrimLong:
50 return Location::RegisterLocation(V0);
51
52 case Primitive::kPrimFloat:
53 case Primitive::kPrimDouble:
54 return Location::FpuRegisterLocation(F0);
55
56 case Primitive::kPrimVoid:
57 return Location();
58 }
59 UNREACHABLE();
60}
61
// Returns the location used to return a value of `type` from a dex method;
// delegates to the shared Mips64ReturnLocation helper above.
Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
  return Mips64ReturnLocation(type);
}
65
// The current method (ArtMethod*) is always passed in kMethodRegisterArgument (A0).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
69
// Assigns a location (argument register or stack slot) to the next method
// parameter of the given type. Note that the FP and GP register indices
// advance together in both register branches below: when a floating-point
// argument takes an FPU register the GP index is bumped too, and vice versa,
// so register slots are consumed in lockstep regardless of argument type
// (presumably the MIPS64 calling convention aliases the two register files
// for argument passing — confirm against the n64 ABI if changing this).
Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;  // Keep the GP index in lockstep (see comment above).
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;  // Keep the FP index in lockstep (see comment above).
  } else {
    // Out of argument registers: the argument lives on the stack.
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}
96
// Runtime calls use the same return locations as dex method calls.
Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return Mips64ReturnLocation(type);
}
100
// Shorthand for emitting instructions through the current codegen's assembler;
// relies on a variable named `codegen` being in scope at the use site.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
// Byte offset of a quick runtime entrypoint within the Thread object, for MIPS64.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700104
105class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
106 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000107 explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700108
109 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100110 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700111 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
112 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000113 if (instruction_->CanThrowIntoCatchBlock()) {
114 // Live registers will be restored in the catch block if caught.
115 SaveLiveRegisters(codegen, instruction_->GetLocations());
116 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700117 // We're moving two locations to locations that could overlap, so we need a parallel
118 // move resolver.
119 InvokeRuntimeCallingConvention calling_convention;
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100120 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700121 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
122 Primitive::kPrimInt,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100123 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700124 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
125 Primitive::kPrimInt);
Serban Constantinescufc734082016-07-19 17:18:07 +0100126 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
127 ? kQuickThrowStringBounds
128 : kQuickThrowArrayBounds;
129 mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100130 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700131 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
132 }
133
Alexandre Rames8158f282015-08-07 10:26:17 +0100134 bool IsFatal() const OVERRIDE { return true; }
135
Roland Levillain46648892015-06-19 16:07:18 +0100136 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
137
Alexey Frunze4dda3372015-06-01 18:31:49 -0700138 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700139 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
140};
141
142class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
143 public:
Alexey Frunzec61c0762017-04-10 13:54:23 -0700144 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
145 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700146
147 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
148 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
149 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100150 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700151 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
152 }
153
Alexandre Rames8158f282015-08-07 10:26:17 +0100154 bool IsFatal() const OVERRIDE { return true; }
155
Roland Levillain46648892015-06-19 16:07:18 +0100156 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
157
Alexey Frunze4dda3372015-06-01 18:31:49 -0700158 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700159 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
160};
161
// Slow path that resolves (and optionally initializes) a class by calling
// into the runtime, then moves the result to the desired location and, for
// HLoadClass/kBssEntry, stores the resolved class into its .bss entry.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `at` is the instruction this slow path belongs to (the HLoadClass itself
  // or an HClinitCheck); `do_clinit` selects static-storage initialization
  // vs. plain type resolution; `bss_info_high` is the high-half patch info
  // for the kBssEntry load kind (nullptr otherwise).
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit,
                          const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<GpuRegister>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the type index in A0 and call the runtime.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out.AsRegister<GpuRegister>(), TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
268
// Slow path that resolves a string via the runtime (kQuickResolveString),
// moves the result into the output register and stores it into the string's
// .bss entry. Only used for the HLoadString kBssEntry load kind.
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction,
                                    const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS64(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    GpuRegister out = locations->Out().AsRegister<GpuRegister>();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the string index in A0 and call the runtime resolver.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(),
                                                   string_index,
                                                   bss_info_high_);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    // Move the runtime's result (in A0) to the desired output location.
    Primitive::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};
353
354class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
355 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000356 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700357
358 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
359 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
360 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000361 if (instruction_->CanThrowIntoCatchBlock()) {
362 // Live registers will be restored in the catch block if caught.
363 SaveLiveRegisters(codegen, instruction_->GetLocations());
364 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100365 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700366 instruction_,
367 instruction_->GetDexPc(),
368 this);
369 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
370 }
371
Alexandre Rames8158f282015-08-07 10:26:17 +0100372 bool IsFatal() const OVERRIDE { return true; }
373
Roland Levillain46648892015-06-19 16:07:18 +0100374 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
375
Alexey Frunze4dda3372015-06-01 18:31:49 -0700376 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700377 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
378};
379
380class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
381 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100382 SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000383 : SlowPathCodeMIPS64(instruction), successor_(successor) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700384
385 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200386 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700387 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
388 __ Bind(GetEntryLabel());
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200389 SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
Serban Constantinescufc734082016-07-19 17:18:07 +0100390 mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700391 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200392 RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
Alexey Frunze4dda3372015-06-01 18:31:49 -0700393 if (successor_ == nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700394 __ Bc(GetReturnLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700395 } else {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700396 __ Bc(mips64_codegen->GetLabelOf(successor_));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700397 }
398 }
399
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700400 Mips64Label* GetReturnLabel() {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700401 DCHECK(successor_ == nullptr);
402 return &return_label_;
403 }
404
Roland Levillain46648892015-06-19 16:07:18 +0100405 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }
406
Alexey Frunze4dda3372015-06-01 18:31:49 -0700407 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700408 // If not null, the block to branch to after the suspend check.
409 HBasicBlock* const successor_;
410
411 // If `successor_` is null, the label to branch to after the suspend check.
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700412 Mips64Label return_label_;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700413
414 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
415};
416
// Slow path shared by HInstanceOf and HCheckCast: passes the object and the
// class to the runtime. For instance-of the boolean result is moved to the
// output; for check-cast the runtime throws on failure. When `is_fatal_` the
// path never returns, so live registers are not saved/restored.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      // Fatal paths never return, so saving registers would be wasted work.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Move the runtime's boolean result to the instruction's output location.
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether the slow path unconditionally throws (no return to inline code).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
471
472class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
473 public:
Aart Bik42249c32016-01-07 15:33:50 -0800474 explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000475 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700476
477 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800478 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700479 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100480 LocationSummary* locations = instruction_->GetLocations();
481 SaveLiveRegisters(codegen, locations);
482 InvokeRuntimeCallingConvention calling_convention;
483 __ LoadConst32(calling_convention.GetRegisterAt(0),
484 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufc734082016-07-19 17:18:07 +0100485 mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100486 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700487 }
488
Roland Levillain46648892015-06-19 16:07:18 +0100489 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }
490
Alexey Frunze4dda3372015-06-01 18:31:49 -0700491 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700492 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
493};
494
Alexey Frunze15958152017-02-09 19:08:30 -0800495class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
496 public:
497 explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}
498
499 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
500 LocationSummary* locations = instruction_->GetLocations();
501 __ Bind(GetEntryLabel());
502 SaveLiveRegisters(codegen, locations);
503
504 InvokeRuntimeCallingConvention calling_convention;
505 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
506 parallel_move.AddMove(
507 locations->InAt(0),
508 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
509 Primitive::kPrimNot,
510 nullptr);
511 parallel_move.AddMove(
512 locations->InAt(1),
513 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
514 Primitive::kPrimInt,
515 nullptr);
516 parallel_move.AddMove(
517 locations->InAt(2),
518 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
519 Primitive::kPrimNot,
520 nullptr);
521 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
522
523 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
524 mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
525 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
526 RestoreLiveRegisters(codegen, locations);
527 __ Bc(GetExitLabel());
528 }
529
530 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }
531
532 private:
533 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
534};
535
536// Slow path marking an object reference `ref` during a read
537// barrier. The field `obj.field` in the object `obj` holding this
538// reference does not get updated by this slow path after marking (see
539// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
540//
541// This means that after the execution of this slow path, `ref` will
542// always be up-to-date, but `obj.field` may not; i.e., after the
543// flip, `ref` will be a to-space reference, but `obj.field` will
544// probably still be a from-space reference (unless it gets updated by
545// another thread, or if another thread installed another object
546// reference (different from `ref`) in `obj.field`).
547//
548// If `entrypoint` is a valid location it is assumed to already be
549// holding the entrypoint. The case where the entrypoint is passed in
550// is for the GcRoot read barrier.
551class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
552 public:
553 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
554 Location ref,
555 Location entrypoint = Location::NoLocation())
556 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
557 DCHECK(kEmitCompilerReadBarrier);
558 }
559
560 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
561
  // Emits the call that marks the reference in `ref_` in place (the marked
  // reference is returned in the same register).
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only these instructions are expected to attach a marking slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    // Only this subset of registers has a dedicated marking entrypoint.
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      // The entrypoint address was pre-loaded by the caller (in T9).
      mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
      __ Jalr(entrypoint_.AsRegister<GpuRegister>());
      __ Nop();  // Fill the branch delay slot.
    } else {
      // Map `ref_reg` to its dedicated ReadBarrierMarkRegX entrypoint slot.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                          instruction_,
                                                          this);
    }
    __ Bc(GetExitLabel());
  }
617
 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded; when valid it is
  // expected to hold register T9 (checked in EmitNativeCode).
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
};
627
628// Slow path marking an object reference `ref` during a read barrier,
629// and if needed, atomically updating the field `obj.field` in the
630// object `obj` holding this reference after marking (contrary to
631// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
632// `obj.field`).
633//
634// This means that after the execution of this slow path, both `ref`
635// and `obj.field` will be up-to-date; i.e., after the flip, both will
636// hold the same to-space reference (unless another thread installed
637// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `ref` is the register location of the reference to mark; `obj` holds the
  // object containing the reference field at offset `field_offset`; `temp1`
  // is used to preserve the old reference across the marking call.
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    // Only this subset of registers has a dedicated marking entrypoint.
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;      // Pointer to actual memory.
    GpuRegister tmp = AT;           // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    // SC writes 1 to `tmp` on success, 0 on failure; retry on failure.
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Holds the old reference across the marking call; must not be AT or TMP
  // (both are clobbered by the code above).
  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};
784
785// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the result of the read barrier; `ref` is the reference
  // that was loaded; `obj` is the holder object; the field lives at
  // `obj + offset` (plus `index`, when valid).
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    // Only these instructions are expected to attach this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      // No index: pass the constant offset as the third argument.
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save, non-blocked core register different from `ref_`
  // and `obj_`, used to preserve a callee-save index register above.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
970
971// Slow path generating a read barrier for a GC root.
972class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
973 public:
974 ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
975 : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
976 DCHECK(kEmitCompilerReadBarrier);
977 }
978
979 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
980 LocationSummary* locations = instruction_->GetLocations();
981 Primitive::Type type = Primitive::kPrimNot;
982 GpuRegister reg_out = out_.AsRegister<GpuRegister>();
983 DCHECK(locations->CanCall());
984 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
985 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
986 << "Unexpected instruction in read barrier for GC root slow path: "
987 << instruction_->DebugName();
988
989 __ Bind(GetEntryLabel());
990 SaveLiveRegisters(codegen, locations);
991
992 InvokeRuntimeCallingConvention calling_convention;
993 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
994 mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
995 root_,
996 Primitive::kPrimNot);
997 mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
998 instruction_,
999 instruction_->GetDexPc(),
1000 this);
1001 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1002 mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1003
1004 RestoreLiveRegisters(codegen, locations);
1005 __ Bc(GetExitLabel());
1006 }
1007
1008 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }
1009
1010 private:
1011 const Location out_;
1012 const Location root_;
1013
1014 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
1015};
1016
// Constructs the MIPS64 code generator; wires up the location builder,
// instruction visitor, move resolver and assembler, and sets up the
// arena-backed literal and patch tables.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      // Literal tables keyed by value.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // PC-relative patch tables for methods, types and strings.
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // JIT string/class root patch tables.
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1053
#undef __
// Shorthand for emitting instructions through this code generator's MIPS64
// assembler.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<Mips64Assembler*>(GetAssembler())->  // NOLINT
// Int32 offset of the quick entrypoint `x`, computed for the MIPS64 pointer
// size.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -07001058
// Finalizes code generation: fixes up branches in the assembler, then remaps
// all recorded pc offsets (stack maps, disassembly intervals) to their
// adjusted positions, since fix-ups may have moved instructions.
void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips64);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    // Branch fix-ups can only grow the code, never shrink it.
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}
1089
// Returns the assembler of the owning code generator.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1093
1094void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001095 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001096 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1097}
1098
1099void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001100 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001101 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
1102}
1103
// Reloads a previously spilled scratch register from the stack and releases
// the stack slot (the inverse of SpillScratch).
void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}
1109
// Saves a scratch register to a freshly reserved doubleword stack slot so
// it can be reused during move resolution (restored by RestoreScratch).
void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}
1115
// Swaps the contents of the two stack slots at SP-relative offsets `index1`
// and `index2` (word- or doubleword-sized, per `double_slot`), using TMP
// plus one scratch register.
void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}
1139
// Maps a MIPS64 core register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}
1143
// Maps a MIPS64 floating-point register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001147
// Emits the method prologue: optional stack-overflow probe, frame
// allocation, callee-save spills (with CFI records), the current-method
// store, and initialization of the should_deoptimize flag.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe below SP by the reserved amount; a fault here is reported as a
    // stack overflow (the load target ZERO discards the value).
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips64)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core callee-saves are stored at the top of the frame, highest-numbered
  // register first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow below the core ones.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1205
// Emits the method epilogue: callee-save restores (with CFI records),
// frame deallocation and the return jump. CFI state is remembered and
// restored so that code following the epilogue keeps the frame's rules.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return: compact jump through RA.
  __ Jic(RA, 0);

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1240
// Binds the label of `block` at the current assembler position.
void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1244
1245void CodeGeneratorMIPS64::MoveLocation(Location destination,
1246 Location source,
Calin Juravlee460d1d2015-09-29 04:52:17 +01001247 Primitive::Type dst_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001248 if (source.Equals(destination)) {
1249 return;
1250 }
1251
1252 // A valid move can always be inferred from the destination and source
1253 // locations. When moving from and to a register, the argument type can be
1254 // used to generate 32bit instead of 64bit moves.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001255 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001256 DCHECK_EQ(unspecified_type, false);
1257
1258 if (destination.IsRegister() || destination.IsFpuRegister()) {
1259 if (unspecified_type) {
1260 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1261 if (source.IsStackSlot() ||
1262 (src_cst != nullptr && (src_cst->IsIntConstant()
1263 || src_cst->IsFloatConstant()
1264 || src_cst->IsNullConstant()))) {
1265 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001266 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001267 } else {
1268 // If the source is a double stack slot or a 64bit constant, a 64bit
1269 // type is appropriate. Else the source is a register, and since the
1270 // type has not been specified, we chose a 64bit type to force a 64bit
1271 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001272 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001273 }
1274 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001275 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1276 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001277 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1278 // Move to GPR/FPR from stack
1279 LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001280 if (Primitive::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001281 __ LoadFpuFromOffset(load_type,
1282 destination.AsFpuRegister<FpuRegister>(),
1283 SP,
1284 source.GetStackIndex());
1285 } else {
1286 // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
1287 __ LoadFromOffset(load_type,
1288 destination.AsRegister<GpuRegister>(),
1289 SP,
1290 source.GetStackIndex());
1291 }
1292 } else if (source.IsConstant()) {
1293 // Move to GPR/FPR from constant
1294 GpuRegister gpr = AT;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001295 if (!Primitive::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001296 gpr = destination.AsRegister<GpuRegister>();
1297 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001298 if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001299 int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
1300 if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
1301 gpr = ZERO;
1302 } else {
1303 __ LoadConst32(gpr, value);
1304 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001305 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001306 int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
1307 if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
1308 gpr = ZERO;
1309 } else {
1310 __ LoadConst64(gpr, value);
1311 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001312 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001313 if (dst_type == Primitive::kPrimFloat) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001314 __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001315 } else if (dst_type == Primitive::kPrimDouble) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001316 __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
1317 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001318 } else if (source.IsRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001319 if (destination.IsRegister()) {
1320 // Move to GPR from GPR
1321 __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
1322 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001323 DCHECK(destination.IsFpuRegister());
1324 if (Primitive::Is64BitType(dst_type)) {
1325 __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1326 } else {
1327 __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1328 }
1329 }
1330 } else if (source.IsFpuRegister()) {
1331 if (destination.IsFpuRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001332 // Move to FPR from FPR
Calin Juravlee460d1d2015-09-29 04:52:17 +01001333 if (dst_type == Primitive::kPrimFloat) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001334 __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1335 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001336 DCHECK_EQ(dst_type, Primitive::kPrimDouble);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001337 __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1338 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001339 } else {
1340 DCHECK(destination.IsRegister());
1341 if (Primitive::Is64BitType(dst_type)) {
1342 __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1343 } else {
1344 __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1345 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001346 }
1347 }
1348 } else { // The destination is not a register. It must be a stack slot.
1349 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1350 if (source.IsRegister() || source.IsFpuRegister()) {
1351 if (unspecified_type) {
1352 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001353 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001354 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001355 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001356 }
1357 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001358 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1359 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001360 // Move to stack from GPR/FPR
1361 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
1362 if (source.IsRegister()) {
1363 __ StoreToOffset(store_type,
1364 source.AsRegister<GpuRegister>(),
1365 SP,
1366 destination.GetStackIndex());
1367 } else {
1368 __ StoreFpuToOffset(store_type,
1369 source.AsFpuRegister<FpuRegister>(),
1370 SP,
1371 destination.GetStackIndex());
1372 }
1373 } else if (source.IsConstant()) {
1374 // Move to stack from constant
1375 HConstant* src_cst = source.GetConstant();
1376 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001377 GpuRegister gpr = ZERO;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001378 if (destination.IsStackSlot()) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001379 int32_t value = GetInt32ValueOf(src_cst->AsConstant());
1380 if (value != 0) {
1381 gpr = TMP;
1382 __ LoadConst32(gpr, value);
1383 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001384 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001385 DCHECK(destination.IsDoubleStackSlot());
1386 int64_t value = GetInt64ValueOf(src_cst->AsConstant());
1387 if (value != 0) {
1388 gpr = TMP;
1389 __ LoadConst64(gpr, value);
1390 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001391 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001392 __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001393 } else {
1394 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
1395 DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
1396 // Move to stack from stack
1397 if (destination.IsStackSlot()) {
1398 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1399 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1400 } else {
1401 __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
1402 __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
1403 }
1404 }
1405 }
1406}
1407
// Swaps the contents of two locations (GPR, FPR, or stack slot).
// `type` selects single vs. double precision for FPR<->FPR swaps;
// constants cannot be swapped. Uses TMP/FTMP as scratch, so neither
// location may alias those registers.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    // Read the memory operand into TMP first, then overwrite the slot with
    // the register value, and finally move TMP into the register.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack-to-stack swap is delegated to the parallel move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1473
Calin Juravle175dc732015-08-25 15:42:32 +01001474void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1475 DCHECK(location.IsRegister());
1476 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1477}
1478
Calin Juravlee460d1d2015-09-29 04:52:17 +01001479void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1480 if (location.IsRegister()) {
1481 locations->AddTemp(location);
1482 } else {
1483 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1484 }
1485}
1486
// GC write barrier: dirties the card covering `object` after a reference
// store of `value` into it. When `value_can_be_null` is true, a null store
// branches over the marking entirely.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  // Load the card table base from the Thread object (TR).
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // temp = card_table_base + (object >> kCardShift): address of the card.
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Store the low byte of the card table base as the card value; this avoids
  // materializing a separate "dirty" constant (the GC treats any non-zero
  // card as dirty — NOTE(review): semantics from the shared card-table
  // convention used by the other backends; confirm against CardTable).
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1507
// Converts the recorded PC-relative patch infos into LinkerPatch entries,
// using `Factory` to build the concrete patch kind. For a "low" half the
// PC base is the location of its paired "high" half; a "high" half is its
// own base.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // Use the paired high-half label (if any) as the PC-relative base.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1522
// Collects all linker patches recorded during code generation.
// Boot-image compiles emit relative method/type/string patches; app
// compiles must have no relative method/type patches and emit string
// references as .bss entries instead. The final DCHECK ties the reserved
// size to the number of patches actually emitted.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
                                                                  linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    DCHECK(pc_relative_method_patches_.empty());
    DCHECK(pc_relative_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  // .bss entry patches are emitted for both boot-image and app compiles.
  EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
                                                                linker_patches);
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1551
Vladimir Marko65979462017-05-19 17:25:12 +01001552CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001553 MethodReference target_method,
1554 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001555 return NewPcRelativePatch(*target_method.dex_file,
1556 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001557 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001558 &pc_relative_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001559}
1560
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001561CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001562 MethodReference target_method,
1563 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001564 return NewPcRelativePatch(*target_method.dex_file,
1565 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001566 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001567 &method_bss_entry_patches_);
1568}
1569
Alexey Frunzef63f5692016-12-13 17:43:11 -08001570CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001571 const DexFile& dex_file,
1572 dex::TypeIndex type_index,
1573 const PcRelativePatchInfo* info_high) {
1574 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001575}
1576
Vladimir Marko1998cd02017-01-13 13:02:58 +00001577CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001578 const DexFile& dex_file,
1579 dex::TypeIndex type_index,
1580 const PcRelativePatchInfo* info_high) {
1581 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001582}
1583
Vladimir Marko65979462017-05-19 17:25:12 +01001584CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001585 const DexFile& dex_file,
1586 dex::StringIndex string_index,
1587 const PcRelativePatchInfo* info_high) {
1588 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001589}
1590
Alexey Frunze19f6c692016-11-30 19:19:55 -08001591CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001592 const DexFile& dex_file,
1593 uint32_t offset_or_index,
1594 const PcRelativePatchInfo* info_high,
1595 ArenaDeque<PcRelativePatchInfo>* patches) {
1596 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001597 return &patches->back();
1598}
1599
Alexey Frunzef63f5692016-12-13 17:43:11 -08001600Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1601 return map->GetOrCreate(
1602 value,
1603 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1604}
1605
Alexey Frunze19f6c692016-11-30 19:19:55 -08001606Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1607 return uint64_literals_.GetOrCreate(
1608 value,
1609 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1610}
1611
Alexey Frunzef63f5692016-12-13 17:43:11 -08001612Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001613 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001614}
1615
// Emits the `auipc` that forms the high half of a PC-relative address in
// `out`, binding `info_high`'s label on it and `info_low`'s label on the
// (caller-emitted) instruction that follows. The 0x1234 immediate is a
// placeholder rewritten by the linker.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  DCHECK_EQ(info_low->patch_info_high, info_high);
  __ Bind(&info_low->label);
}
1628
Alexey Frunze627c1a02017-01-30 19:28:14 -08001629Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
1630 dex::StringIndex string_index,
1631 Handle<mirror::String> handle) {
1632 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
1633 reinterpret_cast64<uint64_t>(handle.GetReference()));
1634 return jit_string_patches_.GetOrCreate(
1635 StringReference(&dex_file, string_index),
1636 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1637}
1638
1639Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
1640 dex::TypeIndex type_index,
1641 Handle<mirror::Class> handle) {
1642 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
1643 reinterpret_cast64<uint64_t>(handle.GetReference()));
1644 return jit_class_patches_.GetOrCreate(
1645 TypeReference(&dex_file, type_index),
1646 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1647}
1648
1649void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1650 const uint8_t* roots_data,
1651 const Literal* literal,
1652 uint64_t index_in_table) const {
1653 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1654 uintptr_t address =
1655 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1656 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1657}
1658
1659void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1660 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001661 const StringReference& string_reference = entry.first;
1662 Literal* table_entry_literal = entry.second;
1663 const auto it = jit_string_roots_.find(string_reference);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001664 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001665 uint64_t index_in_table = it->second;
1666 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001667 }
1668 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001669 const TypeReference& type_reference = entry.first;
1670 Literal* table_entry_literal = entry.second;
1671 const auto it = jit_class_roots_.find(type_reference);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001672 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001673 uint64_t index_in_table = it->second;
1674 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001675 }
1676}
1677
David Brazdil58282f42016-01-14 12:45:10 +00001678void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001679 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1680 blocked_core_registers_[ZERO] = true;
1681 blocked_core_registers_[K0] = true;
1682 blocked_core_registers_[K1] = true;
1683 blocked_core_registers_[GP] = true;
1684 blocked_core_registers_[SP] = true;
1685 blocked_core_registers_[RA] = true;
1686
Lazar Trsicd9672662015-09-03 17:33:01 +02001687 // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
1688 // registers (similar to how AT is used by MIPS assemblers).
Alexey Frunze4dda3372015-06-01 18:31:49 -07001689 blocked_core_registers_[AT] = true;
1690 blocked_core_registers_[TMP] = true;
Lazar Trsicd9672662015-09-03 17:33:01 +02001691 blocked_core_registers_[TMP2] = true;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001692 blocked_fpu_registers_[FTMP] = true;
1693
1694 // Reserve suspend and thread registers.
1695 blocked_core_registers_[S0] = true;
1696 blocked_core_registers_[TR] = true;
1697
1698 // Reserve T9 for function calls
1699 blocked_core_registers_[T9] = true;
1700
Goran Jakovljevic782be112016-06-21 12:39:04 +02001701 if (GetGraph()->IsDebuggable()) {
1702 // Stubs do not save callee-save floating point registers. If the graph
1703 // is debuggable, we need to deal with these registers differently. For
1704 // now, just block them.
1705 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1706 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1707 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001708 }
1709}
1710
Alexey Frunze4dda3372015-06-01 18:31:49 -07001711size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1712 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001713 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001714}
1715
1716size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1717 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001718 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001719}
1720
1721size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001722 __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
1723 FpuRegister(reg_id),
1724 SP,
1725 stack_index);
1726 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001727}
1728
1729size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001730 __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
1731 FpuRegister(reg_id),
1732 SP,
1733 stack_index);
1734 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001735}
1736
1737void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001738 stream << GpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001739}
1740
1741void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001742 stream << FpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001743}
1744
// Calls the quick runtime entry point `entrypoint` and, when that entry
// point requires one, records the stack map (PC info) for the call site.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  // The stack map must describe the PC right after the call sequence.
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1755
// Like InvokeRuntime(), but for entry points that never need a stack map;
// the validation call checks that this is legitimate for `instruction`.
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1762
// Emits the actual runtime call: loads the entry point address from the
// Thread object (TR) into T9 and calls through it. The trailing Nop fills
// the slot following Jalr.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1768
// Branches to `slow_path` if the class in `class_reg` is not yet
// initialized (its status compares below kStatusInitialized).
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1778
// Emits a full memory barrier; `sync 0` is used for every barrier kind,
// so the requested kind is ignored.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1782
// Emits a suspend check: tests the thread flags and enters the slow path
// when they are non-zero. With a `successor`, the fast path branches
// straight to that block; without one, the slow path returns inline.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // Load the 16-bit thread-flags field from the Thread object (TR).
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1802
// Caches the shared assembler and the owning code generator used by the
// per-instruction visitors.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1808
1809void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1810 DCHECK_EQ(instruction->InputCount(), 2U);
1811 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1812 Primitive::Type type = instruction->GetResultType();
1813 switch (type) {
1814 case Primitive::kPrimInt:
1815 case Primitive::kPrimLong: {
1816 locations->SetInAt(0, Location::RequiresRegister());
1817 HInstruction* right = instruction->InputAt(1);
1818 bool can_use_imm = false;
1819 if (right->IsConstant()) {
1820 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1821 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1822 can_use_imm = IsUint<16>(imm);
1823 } else if (instruction->IsAdd()) {
1824 can_use_imm = IsInt<16>(imm);
1825 } else {
1826 DCHECK(instruction->IsSub());
1827 can_use_imm = IsInt<16>(-imm);
1828 }
1829 }
1830 if (can_use_imm)
1831 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1832 else
1833 locations->SetInAt(1, Location::RequiresRegister());
1834 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1835 }
1836 break;
1837
1838 case Primitive::kPrimFloat:
1839 case Primitive::kPrimDouble:
1840 locations->SetInAt(0, Location::RequiresFpuRegister());
1841 locations->SetInAt(1, Location::RequiresFpuRegister());
1842 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1843 break;
1844
1845 default:
1846 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1847 }
1848}
1849
// Emits code for Add/Sub/And/Or/Xor. Integer operations select the
// immediate or register instruction form based on the location chosen in
// LocationsBuilderMIPS64::HandleBinaryOp; 32-bit operations use the word
// instructions (Addu/Subu/...) and 64-bit ones the doubleword forms.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        // Immediate subtraction is emitted as addition of the negated
        // immediate (the locations builder verified -imm fits 16 bits).
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == Primitive::kPrimFloat)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
1937
1938void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001939 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001940
1941 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1942 Primitive::Type type = instr->GetResultType();
1943 switch (type) {
1944 case Primitive::kPrimInt:
1945 case Primitive::kPrimLong: {
1946 locations->SetInAt(0, Location::RequiresRegister());
1947 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001948 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001949 break;
1950 }
1951 default:
1952 LOG(FATAL) << "Unexpected shift type " << type;
1953 }
1954}
1955
1956void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001957 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001958 LocationSummary* locations = instr->GetLocations();
1959 Primitive::Type type = instr->GetType();
1960
1961 switch (type) {
1962 case Primitive::kPrimInt:
1963 case Primitive::kPrimLong: {
1964 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
1965 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
1966 Location rhs_location = locations->InAt(1);
1967
1968 GpuRegister rhs_reg = ZERO;
1969 int64_t rhs_imm = 0;
1970 bool use_imm = rhs_location.IsConstant();
1971 if (use_imm) {
1972 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
1973 } else {
1974 rhs_reg = rhs_location.AsRegister<GpuRegister>();
1975 }
1976
1977 if (use_imm) {
Roland Levillain5b5b9312016-03-22 14:57:31 +00001978 uint32_t shift_value = rhs_imm &
1979 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001980
Alexey Frunze92d90602015-12-18 18:16:36 -08001981 if (shift_value == 0) {
1982 if (dst != lhs) {
1983 __ Move(dst, lhs);
1984 }
1985 } else if (type == Primitive::kPrimInt) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001986 if (instr->IsShl()) {
1987 __ Sll(dst, lhs, shift_value);
1988 } else if (instr->IsShr()) {
1989 __ Sra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001990 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001991 __ Srl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001992 } else {
1993 __ Rotr(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001994 }
1995 } else {
1996 if (shift_value < 32) {
1997 if (instr->IsShl()) {
1998 __ Dsll(dst, lhs, shift_value);
1999 } else if (instr->IsShr()) {
2000 __ Dsra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002001 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002002 __ Dsrl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002003 } else {
2004 __ Drotr(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002005 }
2006 } else {
2007 shift_value -= 32;
2008 if (instr->IsShl()) {
2009 __ Dsll32(dst, lhs, shift_value);
2010 } else if (instr->IsShr()) {
2011 __ Dsra32(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002012 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002013 __ Dsrl32(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002014 } else {
2015 __ Drotr32(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002016 }
2017 }
2018 }
2019 } else {
2020 if (type == Primitive::kPrimInt) {
2021 if (instr->IsShl()) {
2022 __ Sllv(dst, lhs, rhs_reg);
2023 } else if (instr->IsShr()) {
2024 __ Srav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002025 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002026 __ Srlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002027 } else {
2028 __ Rotrv(dst, lhs, rhs_reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002029 }
2030 } else {
2031 if (instr->IsShl()) {
2032 __ Dsllv(dst, lhs, rhs_reg);
2033 } else if (instr->IsShr()) {
2034 __ Dsrav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002035 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002036 __ Dsrlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002037 } else {
2038 __ Drotrv(dst, lhs, rhs_reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002039 }
2040 }
2041 }
2042 break;
2043 }
2044 default:
2045 LOG(FATAL) << "Unexpected shift operation type " << type;
2046 }
2047}
2048
// Location setup for HAdd is delegated to the shared binary-op handler.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2052
// Code generation for HAdd is delegated to the shared binary-op handler.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2056
// Location setup for HAnd is delegated to the shared binary-op handler.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2060
// Code generation for HAnd is delegated to the shared binary-op handler.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2064
2065void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002066 Primitive::Type type = instruction->GetType();
2067 bool object_array_get_with_read_barrier =
2068 kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002069 LocationSummary* locations =
Alexey Frunze15958152017-02-09 19:08:30 -08002070 new (GetGraph()->GetArena()) LocationSummary(instruction,
2071 object_array_get_with_read_barrier
2072 ? LocationSummary::kCallOnSlowPath
2073 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07002074 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2075 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
2076 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002077 locations->SetInAt(0, Location::RequiresRegister());
2078 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexey Frunze15958152017-02-09 19:08:30 -08002079 if (Primitive::IsFloatingPointType(type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002080 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2081 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002082 // The output overlaps in the case of an object array get with
2083 // read barriers enabled: we do not want the move to overwrite the
2084 // array's location, as we need it to emit the read barrier.
2085 locations->SetOut(Location::RequiresRegister(),
2086 object_array_get_with_read_barrier
2087 ? Location::kOutputOverlap
2088 : Location::kNoOutputOverlap);
2089 }
2090 // We need a temporary register for the read barrier marking slow
2091 // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
2092 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2093 locations->AddTemp(Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002094 }
2095}
2096
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002097static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2098 auto null_checker = [codegen, instruction]() {
2099 codegen->MaybeRecordImplicitNullCheck(instruction);
2100 };
2101 return null_checker;
2102}
2103
// Emits the load of one array element (also used for String.charAt).
// For each element type: if the index is a constant, the full offset is
// folded into the load; otherwise the address is formed in TMP (Daddu for
// byte-sized elements, Dlsa scaled-add for wider ones). `null_checker` makes
// the first memory access double as the implicit null check of the array.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  Primitive::Type type = instruction->GetType();
  // charAt on a compressed string stores one byte per character, so the
  // char path below may need a runtime check of the compression flag.
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case Primitive::kPrimBoolean: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimByte: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimChar: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        // Load the count field and extract bit 0, the compression flag; the
        // load also serves as the implicit null check.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          // Branch on the flag in TMP: byte load for compressed strings,
          // halfword load for uncompressed ones.
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          // Same two-way dispatch as above, with the address formed in TMP.
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case Primitive::kPrimInt: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp = locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                        out_loc,
                                                        obj,
                                                        data_offset,
                                                        index,
                                                        temp,
                                                        /* needs_null_check */ true);
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2311
2312void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
2313 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2314 locations->SetInAt(0, Location::RequiresRegister());
2315 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2316}
2317
2318void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
2319 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002320 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002321 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2322 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2323 __ LoadFromOffset(kLoadWord, out, obj, offset);
2324 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002325 // Mask out compression flag from String's array length.
2326 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2327 __ Srl(out, out, 1u);
2328 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002329}
2330
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002331Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2332 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2333 ? Location::ConstantLocation(instruction->AsConstant())
2334 : Location::RequiresRegister();
2335}
2336
2337Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2338 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2339 // We can store a non-zero float or double constant without first loading it into the FPU,
2340 // but we should only prefer this if the constant has a single use.
2341 if (instruction->IsConstant() &&
2342 (instruction->AsConstant()->IsZeroBitPattern() ||
2343 instruction->GetUses().HasExactlyOneElement())) {
2344 return Location::ConstantLocation(instruction->AsConstant());
2345 // Otherwise fall through and require an FPU register for the constant.
2346 }
2347 return Location::RequiresFpuRegister();
2348}
2349
Alexey Frunze4dda3372015-06-01 18:31:49 -07002350void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002351 Primitive::Type value_type = instruction->GetComponentType();
2352
2353 bool needs_write_barrier =
2354 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2355 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2356
Alexey Frunze4dda3372015-06-01 18:31:49 -07002357 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2358 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002359 may_need_runtime_call_for_type_check ?
2360 LocationSummary::kCallOnSlowPath :
2361 LocationSummary::kNoCall);
2362
2363 locations->SetInAt(0, Location::RequiresRegister());
2364 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2365 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2366 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002367 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002368 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2369 }
2370 if (needs_write_barrier) {
2371 // Temporary register for the write barrier.
2372 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002373 }
2374}
2375
2376void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
2377 LocationSummary* locations = instruction->GetLocations();
2378 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2379 Location index = locations->InAt(1);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002380 Location value_location = locations->InAt(2);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002381 Primitive::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002382 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002383 bool needs_write_barrier =
2384 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002385 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002386 GpuRegister base_reg = index.IsConstant() ? obj : TMP;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002387
2388 switch (value_type) {
2389 case Primitive::kPrimBoolean:
2390 case Primitive::kPrimByte: {
2391 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002392 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002393 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002394 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002395 __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
2396 }
2397 if (value_location.IsConstant()) {
2398 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2399 __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
2400 } else {
2401 GpuRegister value = value_location.AsRegister<GpuRegister>();
2402 __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002403 }
2404 break;
2405 }
2406
2407 case Primitive::kPrimShort:
2408 case Primitive::kPrimChar: {
2409 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002410 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002411 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002412 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002413 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002414 }
2415 if (value_location.IsConstant()) {
2416 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2417 __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
2418 } else {
2419 GpuRegister value = value_location.AsRegister<GpuRegister>();
2420 __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002421 }
2422 break;
2423 }
2424
Alexey Frunze15958152017-02-09 19:08:30 -08002425 case Primitive::kPrimInt: {
2426 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2427 if (index.IsConstant()) {
2428 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
2429 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002430 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002431 }
2432 if (value_location.IsConstant()) {
2433 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2434 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2435 } else {
2436 GpuRegister value = value_location.AsRegister<GpuRegister>();
2437 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2438 }
2439 break;
2440 }
2441
Alexey Frunze4dda3372015-06-01 18:31:49 -07002442 case Primitive::kPrimNot: {
Alexey Frunze15958152017-02-09 19:08:30 -08002443 if (value_location.IsConstant()) {
2444 // Just setting null.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002445 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002446 if (index.IsConstant()) {
Alexey Frunzec061de12017-02-14 13:27:23 -08002447 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002448 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002449 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunzec061de12017-02-14 13:27:23 -08002450 }
Alexey Frunze15958152017-02-09 19:08:30 -08002451 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2452 DCHECK_EQ(value, 0);
2453 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2454 DCHECK(!needs_write_barrier);
2455 DCHECK(!may_need_runtime_call_for_type_check);
2456 break;
2457 }
2458
2459 DCHECK(needs_write_barrier);
2460 GpuRegister value = value_location.AsRegister<GpuRegister>();
2461 GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
2462 GpuRegister temp2 = TMP; // Doesn't need to survive slow path.
2463 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2464 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2465 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2466 Mips64Label done;
2467 SlowPathCodeMIPS64* slow_path = nullptr;
2468
2469 if (may_need_runtime_call_for_type_check) {
2470 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS64(instruction);
2471 codegen_->AddSlowPath(slow_path);
2472 if (instruction->GetValueCanBeNull()) {
2473 Mips64Label non_zero;
2474 __ Bnezc(value, &non_zero);
2475 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2476 if (index.IsConstant()) {
2477 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002478 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002479 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002480 }
Alexey Frunze15958152017-02-09 19:08:30 -08002481 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2482 __ Bc(&done);
2483 __ Bind(&non_zero);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002484 }
Alexey Frunze15958152017-02-09 19:08:30 -08002485
2486 // Note that when read barriers are enabled, the type checks
2487 // are performed without read barriers. This is fine, even in
2488 // the case where a class object is in the from-space after
2489 // the flip, as a comparison involving such a type would not
2490 // produce a false positive; it may of course produce a false
2491 // negative, in which case we would take the ArraySet slow
2492 // path.
2493
2494 // /* HeapReference<Class> */ temp1 = obj->klass_
2495 __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
2496 __ MaybeUnpoisonHeapReference(temp1);
2497
2498 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2499 __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
2500 // /* HeapReference<Class> */ temp2 = value->klass_
2501 __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
2502 // If heap poisoning is enabled, no need to unpoison `temp1`
2503 // nor `temp2`, as we are comparing two poisoned references.
2504
2505 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2506 Mips64Label do_put;
2507 __ Beqc(temp1, temp2, &do_put);
2508 // If heap poisoning is enabled, the `temp1` reference has
2509 // not been unpoisoned yet; unpoison it now.
2510 __ MaybeUnpoisonHeapReference(temp1);
2511
2512 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2513 __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
2514 // If heap poisoning is enabled, no need to unpoison
2515 // `temp1`, as we are comparing against null below.
2516 __ Bnezc(temp1, slow_path->GetEntryLabel());
2517 __ Bind(&do_put);
2518 } else {
2519 __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
2520 }
2521 }
2522
2523 GpuRegister source = value;
2524 if (kPoisonHeapReferences) {
2525 // Note that in the case where `value` is a null reference,
2526 // we do not enter this block, as a null reference does not
2527 // need poisoning.
2528 __ Move(temp1, value);
2529 __ PoisonHeapReference(temp1);
2530 source = temp1;
2531 }
2532
2533 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2534 if (index.IsConstant()) {
2535 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002536 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002537 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002538 }
2539 __ StoreToOffset(kStoreWord, source, base_reg, data_offset);
2540
2541 if (!may_need_runtime_call_for_type_check) {
2542 codegen_->MaybeRecordImplicitNullCheck(instruction);
2543 }
2544
2545 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
2546
2547 if (done.IsLinked()) {
2548 __ Bind(&done);
2549 }
2550
2551 if (slow_path != nullptr) {
2552 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002553 }
2554 break;
2555 }
2556
2557 case Primitive::kPrimLong: {
2558 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002559 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002560 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002561 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002562 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002563 }
2564 if (value_location.IsConstant()) {
2565 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2566 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2567 } else {
2568 GpuRegister value = value_location.AsRegister<GpuRegister>();
2569 __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002570 }
2571 break;
2572 }
2573
2574 case Primitive::kPrimFloat: {
2575 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002576 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002577 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002578 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002579 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002580 }
2581 if (value_location.IsConstant()) {
2582 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2583 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2584 } else {
2585 FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
2586 __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002587 }
2588 break;
2589 }
2590
2591 case Primitive::kPrimDouble: {
2592 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002593 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002594 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002595 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002596 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002597 }
2598 if (value_location.IsConstant()) {
2599 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2600 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2601 } else {
2602 FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
2603 __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002604 }
2605 break;
2606 }
2607
2608 case Primitive::kPrimVoid:
2609 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2610 UNREACHABLE();
2611 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002612}
2613
2614void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002615 RegisterSet caller_saves = RegisterSet::Empty();
2616 InvokeRuntimeCallingConvention calling_convention;
2617 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2618 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2619 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002620 locations->SetInAt(0, Location::RequiresRegister());
2621 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002622}
2623
2624void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
2625 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002626 BoundsCheckSlowPathMIPS64* slow_path =
2627 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002628 codegen_->AddSlowPath(slow_path);
2629
2630 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
2631 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
2632
2633 // length is limited by the maximum positive signed 32-bit integer.
2634 // Unsigned comparison of length and index checks for index < 0
2635 // and for length <= index simultaneously.
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002636 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002637}
2638
Alexey Frunze15958152017-02-09 19:08:30 -08002639// Temp is used for read barrier.
2640static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2641 if (kEmitCompilerReadBarrier &&
2642 (kUseBakerReadBarrier ||
2643 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2644 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2645 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2646 return 1;
2647 }
2648 return 0;
2649}
2650
2651// Extra temp is used for read barrier.
2652static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2653 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2654}
2655
Alexey Frunze4dda3372015-06-01 18:31:49 -07002656void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002657 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2658 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
2659
2660 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
2661 switch (type_check_kind) {
2662 case TypeCheckKind::kExactCheck:
2663 case TypeCheckKind::kAbstractClassCheck:
2664 case TypeCheckKind::kClassHierarchyCheck:
2665 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08002666 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002667 ? LocationSummary::kCallOnSlowPath
2668 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
2669 break;
2670 case TypeCheckKind::kArrayCheck:
2671 case TypeCheckKind::kUnresolvedCheck:
2672 case TypeCheckKind::kInterfaceCheck:
2673 call_kind = LocationSummary::kCallOnSlowPath;
2674 break;
2675 }
2676
2677 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002678 locations->SetInAt(0, Location::RequiresRegister());
2679 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08002680 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002681}
2682
// Generates the run-time type check for a HCheckCast: verifies that the object
// in input 0 is assignable to the class in input 1 and branches to a
// TypeCheckSlowPathMIPS64 on failure. All class-metadata loads on the fast
// paths are done without read barriers (kWithoutReadBarrier); any false
// negative this may cause is recovered by the slow path, which is therefore
// made non-fatal whenever read barriers are enabled.
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // Second temp only exists when a read barrier requires it.
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  // Offsets of the object/class fields traversed by the checks below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null: a null reference passes any
  // checkcast.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      // TMP counts the remaining references; when it reaches zero no interface
      // matched and the slow path throws.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface (each iftable entry spans two heap references,
      // hence the stride of 2).
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
2869
2870void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
2871 LocationSummary* locations =
2872 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2873 locations->SetInAt(0, Location::RequiresRegister());
2874 if (check->HasUses()) {
2875 locations->SetOut(Location::SameAsFirstInput());
2876 }
2877}
2878
2879void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
2880 // We assume the class is not null.
2881 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
2882 check->GetLoadClass(),
2883 check,
2884 check->GetDexPc(),
2885 true);
2886 codegen_->AddSlowPath(slow_path);
2887 GenerateClassInitializationCheck(slow_path,
2888 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
2889}
2890
2891void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
2892 Primitive::Type in_type = compare->InputAt(0)->GetType();
2893
Alexey Frunze299a9392015-12-08 16:08:02 -08002894 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002895
2896 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002897 case Primitive::kPrimBoolean:
2898 case Primitive::kPrimByte:
2899 case Primitive::kPrimShort:
2900 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002901 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002902 case Primitive::kPrimLong:
2903 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002904 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002905 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2906 break;
2907
2908 case Primitive::kPrimFloat:
Alexey Frunze299a9392015-12-08 16:08:02 -08002909 case Primitive::kPrimDouble:
2910 locations->SetInAt(0, Location::RequiresFpuRegister());
2911 locations->SetInAt(1, Location::RequiresFpuRegister());
2912 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002913 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002914
2915 default:
2916 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2917 }
2918}
2919
// Materializes the three-way comparison of a HCompare in a core register:
// 0 if equal, 1 if lhs > rhs, -1 if lhs < rhs. For floating point, unordered
// (NaN) operands fail the equality test and the gt/lt bias decides the result.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;  // A zero constant compares against $zero directly.
      if (use_imm) {
        if (in_type == Primitive::kPrimLong) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), i.e. the sign of the comparison.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // res = 0 when equal; CmpEqS is false for NaN, which falls through below.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // Gt bias: CmpLtS is false for NaN, so NaN yields 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // Lt bias: NaN yields -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Same structure as the float case, using the double-precision compares.
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3010
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003011void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003012 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08003013 switch (instruction->InputAt(0)->GetType()) {
3014 default:
3015 case Primitive::kPrimLong:
3016 locations->SetInAt(0, Location::RequiresRegister());
3017 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3018 break;
3019
3020 case Primitive::kPrimFloat:
3021 case Primitive::kPrimDouble:
3022 locations->SetInAt(0, Location::RequiresFpuRegister());
3023 locations->SetInAt(1, Location::RequiresFpuRegister());
3024 break;
3025 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003026 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003027 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3028 }
3029}
3030
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003031void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003032 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003033 return;
3034 }
3035
Alexey Frunze299a9392015-12-08 16:08:02 -08003036 Primitive::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003037 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003038 switch (type) {
3039 default:
3040 // Integer case.
3041 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
3042 return;
3043 case Primitive::kPrimLong:
3044 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
3045 return;
Alexey Frunze299a9392015-12-08 16:08:02 -08003046 case Primitive::kPrimFloat:
3047 case Primitive::kPrimDouble:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003048 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3049 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003050 }
3051}
3052
Alexey Frunzec857c742015-09-23 15:12:39 -07003053void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3054 DCHECK(instruction->IsDiv() || instruction->IsRem());
3055 Primitive::Type type = instruction->GetResultType();
3056
3057 LocationSummary* locations = instruction->GetLocations();
3058 Location second = locations->InAt(1);
3059 DCHECK(second.IsConstant());
3060
3061 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3062 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3063 int64_t imm = Int64FromConstant(second.GetConstant());
3064 DCHECK(imm == 1 || imm == -1);
3065
3066 if (instruction->IsRem()) {
3067 __ Move(out, ZERO);
3068 } else {
3069 if (imm == -1) {
3070 if (type == Primitive::kPrimInt) {
3071 __ Subu(out, ZERO, dividend);
3072 } else {
3073 DCHECK_EQ(type, Primitive::kPrimLong);
3074 __ Dsubu(out, ZERO, dividend);
3075 }
3076 } else if (out != dividend) {
3077 __ Move(out, dividend);
3078 }
3079 }
3080}
3081
// Emits division/remainder by a power-of-two constant (|imm| == 2^ctz_imm)
// using shift-and-add sequences instead of a hardware divide. TMP is loaded
// with (2^ctz_imm - 1) for negative dividends and 0 otherwise; adding it
// before the arithmetic shift makes the quotient round toward zero, as Java
// division semantics require.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      // A negative divisor negates the quotient.
      if (imm < 0) {
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        // Shift amounts >= 32 need the non-"32" form of the shift instruction.
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        // Mask the low ctz_imm bits of the biased value, then remove the bias.
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask does not fit in an Andi immediate; mask via shifts instead.
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask via shifts; pick the 64-bit or "32" shift form by amount.
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3185
// Emits division/remainder by an arbitrary constant (not 0, +/-1, or a power
// of two) using the multiply-by-magic-number technique: the high half of
// dividend * magic, adjusted by `shift` and a sign correction, yields the
// quotient. For Rem, the quotient is rebuilt and out = dividend - q * imm.
// Magic and shift come from CalculateMagicAndShiftForDivRem.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == Primitive::kPrimLong),
                                  &magic,
                                  &shift);

  if (type == Primitive::kPrimInt) {
    // TMP = high 32 bits of dividend * magic.
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correct for the wrap-around of the magic constant's sign.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Add 1 when the intermediate result is negative (subtracting its sign).
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // Rebuild the quotient in AT, then out = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // 64-bit variant of the same sequence.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3259
3260void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3261 DCHECK(instruction->IsDiv() || instruction->IsRem());
3262 Primitive::Type type = instruction->GetResultType();
3263 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
3264
3265 LocationSummary* locations = instruction->GetLocations();
3266 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3267 Location second = locations->InAt(1);
3268
3269 if (second.IsConstant()) {
3270 int64_t imm = Int64FromConstant(second.GetConstant());
3271 if (imm == 0) {
3272 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3273 } else if (imm == 1 || imm == -1) {
3274 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003275 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003276 DivRemByPowerOfTwo(instruction);
3277 } else {
3278 DCHECK(imm <= -2 || imm >= 2);
3279 GenerateDivRemWithAnyConstant(instruction);
3280 }
3281 } else {
3282 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3283 GpuRegister divisor = second.AsRegister<GpuRegister>();
3284 if (instruction->IsDiv()) {
3285 if (type == Primitive::kPrimInt)
3286 __ DivR6(out, dividend, divisor);
3287 else
3288 __ Ddiv(out, dividend, divisor);
3289 } else {
3290 if (type == Primitive::kPrimInt)
3291 __ ModR6(out, dividend, divisor);
3292 else
3293 __ Dmod(out, dividend, divisor);
3294 }
3295 }
3296}
3297
Alexey Frunze4dda3372015-06-01 18:31:49 -07003298void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3299 LocationSummary* locations =
3300 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3301 switch (div->GetResultType()) {
3302 case Primitive::kPrimInt:
3303 case Primitive::kPrimLong:
3304 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003305 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003306 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3307 break;
3308
3309 case Primitive::kPrimFloat:
3310 case Primitive::kPrimDouble:
3311 locations->SetInAt(0, Location::RequiresFpuRegister());
3312 locations->SetInAt(1, Location::RequiresFpuRegister());
3313 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3314 break;
3315
3316 default:
3317 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3318 }
3319}
3320
3321void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
3322 Primitive::Type type = instruction->GetType();
3323 LocationSummary* locations = instruction->GetLocations();
3324
3325 switch (type) {
3326 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07003327 case Primitive::kPrimLong:
3328 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003329 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003330 case Primitive::kPrimFloat:
3331 case Primitive::kPrimDouble: {
3332 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3333 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3334 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
3335 if (type == Primitive::kPrimFloat)
3336 __ DivS(dst, lhs, rhs);
3337 else
3338 __ DivD(dst, lhs, rhs);
3339 break;
3340 }
3341 default:
3342 LOG(FATAL) << "Unexpected div type " << type;
3343 }
3344}
3345
3346void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003347 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003348 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003349}
3350
3351void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3352 SlowPathCodeMIPS64* slow_path =
3353 new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
3354 codegen_->AddSlowPath(slow_path);
3355 Location value = instruction->GetLocations()->InAt(0);
3356
3357 Primitive::Type type = instruction->GetType();
3358
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003359 if (!Primitive::IsIntegralType(type)) {
3360 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003361 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003362 }
3363
3364 if (value.IsConstant()) {
3365 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
3366 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003367 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003368 } else {
3369 // A division by a non-null constant is valid. We don't need to perform
3370 // any check, so simply fall through.
3371 }
3372 } else {
3373 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3374 }
3375}
3376
3377void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3378 LocationSummary* locations =
3379 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3380 locations->SetOut(Location::ConstantLocation(constant));
3381}
3382
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site. The constant location set up by the
  // builder lets each consumer materialize the bits itself.
}
3386
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  // The exit instruction consumes nothing and produces nothing.
  exit->SetLocations(nullptr);
}
3390
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // Intentionally empty: no code is emitted for the exit block itself.
}
3393
3394void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3395 LocationSummary* locations =
3396 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3397 locations->SetOut(Location::ConstantLocation(constant));
3398}
3399
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site. The constant location set up by the
  // builder lets each consumer materialize the bits itself.
}
3403
// Shared implementation for HGoto and HTryBoundary: emits at most one
// unconditional branch to `successor`, inserting a suspend check where the
// graph requires one (loop back edges, and the entry block's leading check).
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge: emit the suspend check (which itself branches to the
    // successor) after clearing stale spill-slot entries for loop phis.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method-entry suspend check with no successor of its own.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    // Only branch when the successor is not laid out immediately after us.
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
3422
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  // An unconditional jump needs no operand locations.
  got->SetLocations(nullptr);
}
3426
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  // Shared with HTryBoundary; see HandleGoto.
  HandleGoto(got, got->GetSuccessor());
}
3430
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // A try boundary is a pure control-flow marker; no operand locations needed.
  try_boundary->SetLocations(nullptr);
}
3434
3435void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3436 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3437 if (!successor->IsExitBlock()) {
3438 HandleGoto(try_boundary, successor);
3439 }
3440}
3441
// Materializes the boolean result (0 or 1) of an integer/long comparison
// `lhs <cond> rhs` into the output register. `is64bit` selects how a constant
// right-hand side is read (64- vs 32-bit). Constants are folded into the
// immediate forms of the instructions whenever they fit; otherwise the
// constant is loaded into TMP and the register form is used.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // The addend is unsigned so the addition is done in unsigned arithmetic,
  // avoiding signed-overflow UB when rhs_imm == INT64_MAX.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        // -rhs_imm fits in (D)addiu: subtract the constant, then test the
        // difference against zero.
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        // XOR yields zero iff the operands are equal.
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3597
// Emits a conditional branch to `label` for an integer/long comparison.
// A constant right-hand side of zero uses the compact compare-with-zero
// branches; any other constant is loaded into TMP and the two-register
// compact branches are used. Conditions without a direct instruction are
// expressed by swapping the operands (e.g. LE(a, b) == GE(b, a)).
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    // Comparison against zero: use the single-register compact branches.
    // Unsigned conditions against zero degenerate (B never, AE always).
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        // lhs <= rhs  <=>  rhs >= lhs.
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        // lhs > rhs  <=>  rhs < lhs.
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
3684
// Materializes the result (0 or 1) of a float/double comparison into `dst`.
// The MIPS R6 CMP.cond.fmt instructions set FTMP to all ones when the
// condition holds and all zeros otherwise; Mfc1 + Andi extracts bit 0.
// For kCondNE the equality compare is used and its -1/0 result is turned
// into 0/1 by adding 1. `gt_bias` selects how unordered (NaN) operands
// resolve: the ordered compares (CmpLtS/CmpLeS/...) are false on NaN while
// the unordered variants (CmpUltS/CmpUleS/...) are true, so picking one or
// the other steers NaN toward the "greater" or "lesser" outcome.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       Primitive::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Equal: -1 + 1 == 0; unequal/unordered: 0 + 1 == 1.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        // Operands swapped: lhs > rhs computed as rhs < lhs.
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        // Operands swapped: lhs >= rhs computed as rhs <= lhs.
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Equal: -1 + 1 == 0; unequal/unordered: 0 + 1 == 1.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        // Operands swapped: lhs > rhs computed as rhs < lhs.
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        // Operands swapped: lhs >= rhs computed as rhs <= lhs.
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
3799
// Emits a conditional branch on a float/double comparison. The R6
// CMP.cond.fmt instruction leaves the outcome in FTMP; Bc1nez branches when
// the condition holds, while kCondNE branches on the *failed* equality
// compare via Bc1eqz. `gt_bias` chooses ordered vs. unordered compare
// variants so that NaN operands take the intended branch (see
// GenerateFpCompare for the same selection when materializing a value).
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                Primitive::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        // Operands swapped: lhs > rhs computed as rhs < lhs.
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        // Operands swapped: lhs >= rhs computed as rhs <= lhs.
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        // Operands swapped: lhs > rhs computed as rhs < lhs.
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        // Operands swapped: lhs >= rhs computed as rhs <= lhs.
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  }
}
3900
// Branches according to the condition found at `condition_input_index` of
// `instruction`. A nullptr target means that side falls through. Constant
// conditions collapse to an unconditional branch (or nothing); materialized
// conditions test the register against zero; non-materialized HConditions
// are fused directly into a compare-and-branch sequence.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    // When only the false side has a label, branch there on the opposite
    // condition instead (pattern (1) above).
    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
3976
3977void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
3978 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003979 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003980 locations->SetInAt(0, Location::RequiresRegister());
3981 }
3982}
3983
3984void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003985 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3986 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003987 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00003988 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003989 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00003990 nullptr : codegen_->GetLabelOf(false_successor);
3991 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003992}
3993
void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Deoptimization goes through a slow path that calls the runtime.
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  // Only the first runtime-call argument register needs to be preserved
  // across the slow path.
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  // A register input is needed only for a materialized/boolean condition;
  // a fused condition uses its own locations.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
4005
void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Branch to the deoptimization slow path when the condition holds;
  // otherwise fall through (false_target == nullptr).
  SlowPathCodeMIPS64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}
4014
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004015void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4016 LocationSummary* locations = new (GetGraph()->GetArena())
4017 LocationSummary(flag, LocationSummary::kNoCall);
4018 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004019}
4020
void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // Load the 32-bit should-deoptimize flag from its slot in the current frame.
  __ LoadFromOffset(kLoadWord,
                    flag->GetLocations()->Out().AsRegister<GpuRegister>(),
                    SP,
                    codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
}
4027
David Brazdil74eb1b22015-12-14 11:44:01 +00004028void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
4029 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
4030 if (Primitive::IsFloatingPointType(select->GetType())) {
4031 locations->SetInAt(0, Location::RequiresFpuRegister());
4032 locations->SetInAt(1, Location::RequiresFpuRegister());
4033 } else {
4034 locations->SetInAt(0, Location::RequiresRegister());
4035 locations->SetInAt(1, Location::RequiresRegister());
4036 }
4037 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
4038 locations->SetInAt(2, Location::RequiresRegister());
4039 }
4040 locations->SetOut(Location::SameAsFirstInput());
4041}
4042
void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Mips64Label false_target;
  // When the condition (input 2) is true, fall through and copy the second
  // input into the output; when false, branch over the copy so the output
  // keeps the first input (it was allocated as SameAsFirstInput).
  GenerateTestAndBranch(select,
                        /* condition_input_index */ 2,
                        /* true_target */ nullptr,
                        &false_target);
  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
  __ Bind(&false_target);
}
4053
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // Only a bare location summary is required: no inputs, outputs or temps.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
4057
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally empty:
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
4061
void CodeGeneratorMIPS64::GenerateNop() {
  // Emit a single no-op instruction.
  __ Nop();
}
4065
// Builds the location summary for an instance/static field get. Reference
// field reads with read barriers may call a slow path and, for Baker read
// barriers, need a temp register for the marking path.
void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      object_field_get_with_read_barrier
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps in the case of an object field get with
    // read barriers enabled: we do not want the move to overwrite the
    // object's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_field_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
4097
// Emits the load for an instance/static field get: picks the load width and
// signedness from the field type, applies read barriers for reference
// fields, and emits acquire semantics for volatile reads.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  // Folds the null check into the load where the codegen supports it.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness matching the Java field type.
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimNot:
      load_type = kLoadUnsignedWord;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp_loc = locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4181
4182void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4183 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4184 LocationSummary* locations =
4185 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4186 locations->SetInAt(0, Location::RequiresRegister());
4187 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004188 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004189 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004190 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004191 }
4192}
4193
// Emits code for an instance/static field store. Selects the store width
// from the field's primitive type, emits memory barriers around the store
// for volatile fields, poisons reference values when heap poisoning is
// enabled, and marks the GC card when a reference may have been stored.
// `value_can_be_null` lets MarkGCCard skip the null check when the
// compiler has proven the stored reference non-null.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  // The null checker makes the first store double as the implicit null
  // check on `obj` where that optimization applies.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Pick the store width matching the field type.
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Barrier before a volatile store: order all prior accesses before it.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    // Constant values are materialized through TMP by StoreConstToOffset.
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!Primitive::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        // Poison into TMP so `src` keeps the unpoisoned reference for
        // the card mark below.
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  // Record the store in the GC card table when a reference was written.
  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Barrier after a volatile store: order it before subsequent accesses.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4268
4269void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
4270 HandleFieldGet(instruction, instruction->GetFieldInfo());
4271}
4272
4273void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
4274 HandleFieldGet(instruction, instruction->GetFieldInfo());
4275}
4276
4277void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
4278 HandleFieldSet(instruction, instruction->GetFieldInfo());
4279}
4280
4281void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01004282 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07004283}
4284
// Loads a heap reference located at `*(out + offset)` back into `out`
// (in-place load), honoring the requested read-barrier option. With a
// Baker read barrier the fast-path helper is used; with a non-Baker read
// barrier the original `out` must first be preserved in `maybe_temp` for
// the slow-path call; with no read barrier a plain (maybe-unpoisoned)
// load is emitted.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
4321
// Loads the heap reference at `*(obj + offset)` into `out` (distinct
// source and destination registers), honoring the requested read-barrier
// option; `maybe_temp` is only required (and checked) on the Baker
// fast path.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
4356
// Loads a GC root (a compressed reference) from `*(obj + offset)` into
// `root`. With a Baker read barrier the per-register mark entrypoint is
// loaded from the thread and, when non-null (GC marking), a slow path
// re-marks the root; with a non-Baker read barrier the root *address*
// is computed and the slow-path root barrier is invoked; otherwise a
// plain load is emitted. GC roots are never poisoned.
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    GpuRegister obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      Location temp = Location::RegisterLocation(T9);
      SlowPathCodeMIPS64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(
              instruction,
              root,
              /*entrypoint*/ temp);
      codegen_->AddSlowPath(slow_path);

      // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      const int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
      // Loading the entrypoint does not require a load acquire since it is only changed when
      // threads are suspended or running a checkpoint.
      __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
      // The entrypoint is null when the GC is not marking, this prevents one load compared to
      // checking GetIsGcMarking.
      __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
4421
Alexey Frunze15958152017-02-09 19:08:30 -08004422void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
4423 Location ref,
4424 GpuRegister obj,
4425 uint32_t offset,
4426 Location temp,
4427 bool needs_null_check) {
4428 DCHECK(kEmitCompilerReadBarrier);
4429 DCHECK(kUseBakerReadBarrier);
4430
4431 // /* HeapReference<Object> */ ref = *(obj + offset)
4432 Location no_index = Location::NoLocation();
4433 ScaleFactor no_scale_factor = TIMES_1;
4434 GenerateReferenceLoadWithBakerReadBarrier(instruction,
4435 ref,
4436 obj,
4437 offset,
4438 no_index,
4439 no_scale_factor,
4440 temp,
4441 needs_null_check);
4442}
4443
4444void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
4445 Location ref,
4446 GpuRegister obj,
4447 uint32_t data_offset,
4448 Location index,
4449 Location temp,
4450 bool needs_null_check) {
4451 DCHECK(kEmitCompilerReadBarrier);
4452 DCHECK(kUseBakerReadBarrier);
4453
4454 static_assert(
4455 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
4456 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
4457 // /* HeapReference<Object> */ ref =
4458 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
4459 ScaleFactor scale_factor = TIMES_4;
4460 GenerateReferenceLoadWithBakerReadBarrier(instruction,
4461 ref,
4462 obj,
4463 data_offset,
4464 index,
4465 scale_factor,
4466 temp,
4467 needs_null_check);
4468}
4469
// Common Baker read-barrier reference load: reads the object's lock word
// first, fences (Sync(0)) to order it before the reference load, performs
// the (optionally indexed) reference load, then branches to a marking
// slow path when the lock word's read-barrier state bit says "gray".
// When `always_update_field` is set, the slow path also writes the marked
// reference back to `obj + index` (CAS-style intrinsics).
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load above doubles as the implicit null check on `obj`.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      // Dlsa shifts by 1..4; a unit scale needs a plain add.
      if (scale_factor == TIMES_1) {
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
4575
// Emits a non-Baker (slow-path-only) read barrier for a heap reference
// already loaded into `ref` from `obj + offset [+ index]`: an
// unconditional branch into a slow path that calls the runtime and
// returns the possibly-updated reference in `out`.
void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
4602
// Emits a slow-path read barrier only when read barriers are compiled in
// (and are not Baker's — Baker loads go through the fast-path helper);
// otherwise just unpoisons `out` if heap poisoning is enabled.
void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location ref,
                                                       Location obj,
                                                       uint32_t offset,
                                                       Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
  }
}
4620
// Emits the slow-path read barrier for a GC root already loaded into
// `root`: an unconditional branch into a slow path that calls the
// runtime root barrier and leaves the result in `out`.
void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
  AddSlowPath(slow_path);

  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
4637
// Sets up locations for an instanceof check. The call kind depends on the
// type-check kind: the "simple" hierarchy checks only need a slow path
// when read barriers are compiled in, while array/unresolved/interface
// checks always go through a slow path.
void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The output does overlap inputs.
  // Note that TypeCheckSlowPathMIPS64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
4669
// Generates code for an instanceof check: writes 1 to `out` when `obj`
// is an instance of `cls`, 0 otherwise. Dispatches on the statically
// determined type-check kind; the simple kinds are inlined (with
// read-barrier-aware class loads), while array-check and the
// unresolved/interface kinds fall back to a TypeCheckSlowPathMIPS64.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out == cls) via xor + set-less-than-immediate-unsigned.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
4843
4844void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
4845 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4846 locations->SetOut(Location::ConstantLocation(constant));
4847}
4848
// No code is emitted for the constant itself.
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
4852
4853void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
4854 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4855 locations->SetOut(Location::ConstantLocation(constant));
4856}
4857
// No code is emitted for the constant itself.
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
4861
// Sets up locations for an invoke whose target could not be resolved at
// compile time; it is dispatched through a runtime trampoline.
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
4868
// Emits the runtime-trampoline call for an unresolved invoke; the shared
// CodeGenerator helper produces the actual call sequence.
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
4872
Alexey Frunze4dda3372015-06-01 18:31:49 -07004873void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
4874 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
4875 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
4876}
4877
// Interface invokes use the common invoke locations plus an extra fixed temp
// (T0) that carries the hidden IMT-conflict argument.
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
4884
// Emits an interface call: loads the receiver's class, indexes into the IMT,
// and jumps to the resolved ArtMethod's quick entry point via T9.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument (temp 1, i.e. T0 — see VisitInvokeInterface in the
  // locations builder) to the dex method index for the conflict trampoline.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  // The receiver may have been spilled to the stack; reload it first if so.
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check of the receiver;
  // it must be recorded immediately after that load.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->imt_ (pointer to the class's interface method table).
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9(); the Nop fills the branch delay slot of Jalr.
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  // Record the PC so the runtime can map the return address to dex state.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
4926
// Locations for a virtual invoke: an intrinsic implementation, if recognized,
// sets up its own locations; otherwise fall back to the common invoke setup.
void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}
4935
4936void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004937 // Explicit clinit checks triggered by static invokes must have been pruned by
4938 // art::PrepareForRegisterAllocation.
4939 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07004940
Chris Larsen3039e382015-08-26 07:54:08 -07004941 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
4942 if (intrinsic.TryDispatch(invoke)) {
4943 return;
4944 }
4945
Alexey Frunze4dda3372015-06-01 18:31:49 -07004946 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004947}
4948
// Polymorphic (method-handle) invokes use the common invoke location setup.
void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
4952
// Polymorphic invokes are handled entirely by the shared CodeGenerator helper,
// which emits the runtime call.
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
4956
Chris Larsen3039e382015-08-26 07:54:08 -07004957static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004958 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07004959 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
4960 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004961 return true;
4962 }
4963 return false;
4964}
4965
// Returns the HLoadString load kind this backend will actually use for the
// requested kind. MIPS64 supports all kinds, so in this chunk `fallback_load`
// is never set and the desired kind is returned unchanged; the switch only
// sanity-checks JIT/AOT consistency.
HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  bool fallback_load = false;
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBssEntry:
      // PC-relative kinds are AOT-only.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kRuntimeCall:
      break;
  }
  if (fallback_load) {
    desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
  }
  return desired_string_load_kind;
}
4986
// Returns the HLoadClass load kind this backend will actually use for the
// requested kind. As with strings, MIPS64 supports all kinds here, so
// `fallback_load` stays false and the desired kind is returned unchanged.
HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  bool fallback_load = false;
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBssEntry:
      // PC-relative kinds are AOT-only.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  if (fallback_load) {
    desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
  }
  return desired_class_load_kind;
}
5012
// Returns the dispatch info this backend will actually use; MIPS64 accepts the
// desired info as-is since every dispatch type is supported.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}
5019
// Emits a static or direct call: first resolves the callee ArtMethod (or code
// address) per the invoke's MethodLoadKind, then transfers control per its
// CodePtrLocation. `temp` receives the callee method for most load kinds;
// `slow_path` (may be null) is threaded into RecordPcInfo for slow-path calls.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling ourselves: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Emit a high/low pair of PC-relative patches; the 0x5678 immediate is a
      // placeholder rewritten at link time.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Load the known method address from the literal pool.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry filled in at runtime.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch directly to our own frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9(); the Nop fills the branch delay slot of Jalr.
      __ Jalr(T9);
      __ Nop();
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
5091
5092void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005093 // Explicit clinit checks triggered by static invokes must have been pruned by
5094 // art::PrepareForRegisterAllocation.
5095 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005096
5097 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5098 return;
5099 }
5100
5101 LocationSummary* locations = invoke->GetLocations();
5102 codegen_->GenerateStaticOrDirectCall(invoke,
5103 locations->HasTemps()
5104 ? locations->GetTemp(0)
5105 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005106}
5107
// Emits a virtual call: loads the receiver's class, reads the vtable entry for
// the invoke's vtable index into `temp_location`, and jumps to the method's
// quick entry point via T9.
void CodeGeneratorMIPS64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  // The class load doubles as the implicit null check of the receiver and must
  // be recorded immediately after that load.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9(); the Nop fills the branch delay slot of Jalr.
  __ Jalr(T9);
  __ Nop();
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
5143
// Emits a virtual invoke: intrinsic code if available, otherwise the generic
// vtable dispatch.
void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
}
5152
// Sets up locations for HLoadClass according to its load kind: runtime calls
// use the runtime calling convention; all other kinds produce the class in a
// register, with kBssEntry additionally reserving a temp and tuning the
// slow-path caller-save set.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // A read barrier (outside the boot image) forces a slow-path-capable summary.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // kReferrersClass reads the class out of the current ArtMethod (input 0).
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
5189
// Emits code to load a java.lang.Class reference into the output register,
// dispatching on the load kind chosen earlier. May attach a slow path for
// resolution (kBssEntry) and/or class initialization.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes never need a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // High/low PC-relative patch pair; 0x5678 is a link-time placeholder.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the class from its .bss entry; a null result means unresolved and
      // triggers the slow path below.
      bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      // With non-Baker read barriers there is no temp (temp-clobbering call),
      // so reuse `out` to hold the entry address.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      GpuRegister temp = non_baker_read_barrier
          ? out
          : locations->GetTemp(0).AsRegister<GpuRegister>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, temp, info_low);
      GenerateGcRootFieldLoad(cls, out_loc, temp, /* placeholder */ 0x5678, read_barrier_option);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      // Load via the JIT class table literal, then load the root it points at.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
5288
// Offset of the pending-exception slot in the Thread object (accessed via TR).
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
}
5292
// The loaded exception object needs only an output register.
void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
5298
// Loads the pending exception reference from the Thread (TR) into the output.
void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
  GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
}
5303
// Clearing the exception needs no registers at all.
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
5307
// Stores null (ZERO) into the Thread's pending-exception slot.
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
5311
// Sets up locations for HLoadString: runtime calls pin the output to the
// runtime calling convention's first register; other kinds get any register,
// and kBssEntry additionally reserves a temp and tunes the slow-path
// caller-save set.
void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path.
        locations->AddTemp(Location::RequiresRegister());
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
5336
// Emits code to load a java.lang.String reference into the output register,
// dispatching on the load kind. kBssEntry attaches a resolution slow path;
// kRuntimeCall falls through to an unconditional runtime call.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // High/low PC-relative patch pair; 0x5678 is a link-time placeholder.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the string from its .bss entry; null means unresolved and routes
      // to the slow path below.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      // With non-Baker read barriers there is no temp (temp-clobbering call),
      // so reuse `out` to hold the entry address.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      GpuRegister temp = non_baker_read_barrier
          ? out
          : locations->GetTemp(0).AsRegister<GpuRegister>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, temp, info_low);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption);
      SlowPathCodeMIPS64* slow_path =
          new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load, info_high);
      codegen_->AddSlowPath(slow_path);
      __ Beqzc(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress:
      // Load via the JIT string table literal, then load the root it points at.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
                                                           load->GetStringIndex(),
                                                           load->GetString()));
      GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
      return;
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
5408
void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
  // Long constants need no register: they materialize at each use site.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}
5413
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
5417
void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
  // Monitor enter/exit is a runtime call on the main path; the object to
  // lock/unlock goes in the first runtime-call argument register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
5424
5425void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01005426 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07005427 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01005428 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005429 if (instruction->IsEnter()) {
5430 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5431 } else {
5432 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5433 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005434}
5435
void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
  // Multiplication is a pure register-to-register operation: both operands
  // and the result live in GPRs (int/long) or FPRs (float/double).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
5458
5459void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
5460 Primitive::Type type = instruction->GetType();
5461 LocationSummary* locations = instruction->GetLocations();
5462
5463 switch (type) {
5464 case Primitive::kPrimInt:
5465 case Primitive::kPrimLong: {
5466 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
5467 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
5468 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
5469 if (type == Primitive::kPrimInt)
5470 __ MulR6(dst, lhs, rhs);
5471 else
5472 __ Dmul(dst, lhs, rhs);
5473 break;
5474 }
5475 case Primitive::kPrimFloat:
5476 case Primitive::kPrimDouble: {
5477 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
5478 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
5479 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
5480 if (type == Primitive::kPrimFloat)
5481 __ MulS(dst, lhs, rhs);
5482 else
5483 __ MulD(dst, lhs, rhs);
5484 break;
5485 }
5486 default:
5487 LOG(FATAL) << "Unexpected mul type " << type;
5488 }
5489}
5490
void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
  // Negation reads one register and writes one, in the GPR or FPR file
  // depending on the result type.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
5511
5512void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
5513 Primitive::Type type = instruction->GetType();
5514 LocationSummary* locations = instruction->GetLocations();
5515
5516 switch (type) {
5517 case Primitive::kPrimInt:
5518 case Primitive::kPrimLong: {
5519 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
5520 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
5521 if (type == Primitive::kPrimInt)
5522 __ Subu(dst, ZERO, src);
5523 else
5524 __ Dsubu(dst, ZERO, src);
5525 break;
5526 }
5527 case Primitive::kPrimFloat:
5528 case Primitive::kPrimDouble: {
5529 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
5530 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
5531 if (type == Primitive::kPrimFloat)
5532 __ NegS(dst, src);
5533 else
5534 __ NegD(dst, src);
5535 break;
5536 }
5537 default:
5538 LOG(FATAL) << "Unexpected neg type " << type;
5539 }
5540}
5541
void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
  // Array allocation is a runtime call: class in arg 0, length in arg 1,
  // reference returned in the standard return register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
5550
void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(kQuickAllocArrayResolved, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
}
5557
void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation is a runtime call. String allocation goes through
  // StringFactory instead and only needs a temp for the method pointer.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
5569
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // Load the ArtMethod* for NewEmptyString from the thread (TR), then its
    // entry point, and call indirectly through T9 per the MIPS ABI.
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();  // Fill the branch delay slot.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
5588
void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
  // Bitwise not: one GPR in, one GPR out.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
5594
5595void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
5596 Primitive::Type type = instruction->GetType();
5597 LocationSummary* locations = instruction->GetLocations();
5598
5599 switch (type) {
5600 case Primitive::kPrimInt:
5601 case Primitive::kPrimLong: {
5602 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
5603 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
5604 __ Nor(dst, src, ZERO);
5605 break;
5606 }
5607
5608 default:
5609 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5610 }
5611}
5612
void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  // Boolean not: one GPR in, one GPR out.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
5618
5619void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
5620 LocationSummary* locations = instruction->GetLocations();
5621 __ Xori(locations->Out().AsRegister<GpuRegister>(),
5622 locations->InAt(0).AsRegister<GpuRegister>(),
5623 1);
5624}
5625
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Null checks throw via a slow path; only the checked reference is needed.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
5630
Calin Juravle2ae48182016-03-16 14:05:09 +00005631void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5632 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005633 return;
5634 }
5635 Location obj = instruction->GetLocations()->InAt(0);
5636
5637 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00005638 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005639}
5640
Calin Juravle2ae48182016-03-16 14:05:09 +00005641void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005642 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005643 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005644
5645 Location obj = instruction->GetLocations()->InAt(0);
5646
5647 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
5648}
5649
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Chooses between the implicit and explicit null-check strategies.
  codegen_->GenerateNullCheck(instruction);
}
5653
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  // Shares location setup with the other two-operand ALU instructions.
  HandleBinaryOp(instruction);
}
5657
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  // Shares code generation with the other two-operand ALU instructions.
  HandleBinaryOp(instruction);
}
5661
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are created by the register allocator, after this phase.
  LOG(FATAL) << "Unreachable";
}
5665
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  // Resolve the parallel move into an ordered sequence of machine moves.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
5669
5670void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
5671 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5672 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5673 if (location.IsStackSlot()) {
5674 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5675 } else if (location.IsDoubleStackSlot()) {
5676 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5677 }
5678 locations->SetOut(location);
5679}
5680
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
5685
void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  // The current ArtMethod* is always held in the method register (A0).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
5691
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
5696
void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
  // Phis place no constraints; the register allocator resolves their inputs.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
5704
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis are eliminated before code generation (turned into parallel moves).
  LOG(FATAL) << "Unreachable";
}
5708
void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
  // Integer remainder is computed inline; floating-point remainder calls the
  // fmod/fmodf runtime entry points, hence the call kind depends on the type.
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                           : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      // The divisor may be a constant (strength-reduced in the generator).
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
5737
5738void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
5739 Primitive::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005740
5741 switch (type) {
5742 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07005743 case Primitive::kPrimLong:
5744 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005745 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07005746
5747 case Primitive::kPrimFloat:
5748 case Primitive::kPrimDouble: {
Serban Constantinescufc734082016-07-19 17:18:07 +01005749 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
5750 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005751 if (type == Primitive::kPrimFloat) {
5752 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5753 } else {
5754 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5755 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005756 break;
5757 }
5758 default:
5759 LOG(FATAL) << "Unexpected rem type " << type;
5760 }
5761}
5762
void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // A constructor fence uses no registers.
  constructor_fence->SetLocations(nullptr);
}
5766
void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // Publish final-field writes before the reference escapes.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
5771
void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // A memory barrier uses no registers.
  memory_barrier->SetLocations(nullptr);
}
5775
void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier flavor requested by the HIR node.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
5779
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  // The returned value must be in the ABI-mandated return register.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}
5785
void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}
5789
void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  // Void return uses no registers.
  ret->SetLocations(nullptr);
}
5793
void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  // Tear down the frame and return.
  codegen_->GenerateFrameExit();
}
5797
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  // Rotate shares location setup with the other shift instructions.
  HandleShift(ror);
}
5801
void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  // Rotate shares code generation with the other shift instructions.
  HandleShift(ror);
}
5805
void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  // Shift-left shares location setup with the other shift instructions.
  HandleShift(shl);
}
5809
void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  // Shift-left shares code generation with the other shift instructions.
  HandleShift(shl);
}
5813
void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  // Arithmetic shift-right shares location setup with the other shifts.
  HandleShift(shr);
}
5817
void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  // Arithmetic shift-right shares code generation with the other shifts.
  HandleShift(shr);
}
5821
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares location setup with the other binary ALU ops.
  HandleBinaryOp(instruction);
}
5825
void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares code generation with the other binary ALU ops.
  HandleBinaryOp(instruction);
}
5829
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads share location setup with instance field loads.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
5833
void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads share code generation with instance field loads.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
5837
void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field stores share location setup with instance field stores.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
5841
5842void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01005843 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005844}
5845
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Unresolved field accesses go through the runtime; use its calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5852
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Delegate the runtime call emission to the shared code generator helper.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5862
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Unresolved field accesses go through the runtime; use its calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5869
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Delegate the runtime call emission to the shared code generator helper.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5879
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Unresolved field accesses go through the runtime; use its calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5886
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Delegate the runtime call emission to the shared code generator helper.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5896
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Unresolved field accesses go through the runtime; use its calling convention.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
5903
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Delegate the runtime call emission to the shared code generator helper.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
5913
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
5923
5924void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
5925 HBasicBlock* block = instruction->GetBlock();
5926 if (block->GetLoopInformation() != nullptr) {
5927 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5928 // The back edge will generate the suspend check.
5929 return;
5930 }
5931 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5932 // The goto will generate the suspend check.
5933 return;
5934 }
5935 GenerateSuspendCheck(instruction, nullptr);
5936}
5937
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  // Throw is a runtime call; the exception object goes in argument 0.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
5944
void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  // Deliver the exception through the runtime; control does not return here.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
5949
5950void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
5951 Primitive::Type input_type = conversion->GetInputType();
5952 Primitive::Type result_type = conversion->GetResultType();
5953 DCHECK_NE(input_type, result_type);
5954
5955 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
5956 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
5957 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5958 }
5959
Alexey Frunzebaf60b72015-12-22 15:15:03 -08005960 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
5961
5962 if (Primitive::IsFloatingPointType(input_type)) {
5963 locations->SetInAt(0, Location::RequiresFpuRegister());
5964 } else {
5965 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005966 }
5967
Alexey Frunzebaf60b72015-12-22 15:15:03 -08005968 if (Primitive::IsFloatingPointType(result_type)) {
5969 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005970 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08005971 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005972 }
5973}
5974
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  // Emits the machine code for a primitive type conversion. The four cases are
  // int<->int (masks/sign-extensions), int->fp (mtc1 + cvt), fp->int
  // (trunc + mfc1), and fp<->fp (cvt between single and double).
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend with a mask.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case Primitive::kPrimShort:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case Primitive::kPrimInt:
      case Primitive::kPrimLong:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == Primitive::kPrimLong) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    // int/long -> float/double: move to the FPU scratch register, then convert.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == Primitive::kPrimLong) {
      __ Dmtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    // float/double -> int/long: truncate into the FPU scratch register, then
    // move the bits to the GPR.
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == Primitive::kPrimLong) {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float <-> double: a single cvt instruction.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
6078
// UShr and Xor have no MIPS64-specific locations or codegen of their own;
// they reuse the shared shift / binary-op handlers.

void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  // Register allocation is identical for all shift kinds.
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  // Code generation is shared with Shl/Shr; the handler picks the opcode.
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  // Register allocation is identical for all bitwise/arithmetic binary ops.
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  // Code generation is shared with And/Or/Add/...; the handler picks the opcode.
  HandleBinaryOp(instruction);
}
6094
void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  // Reaching here means an HBoundType survived to code generation, which is a
  // compiler bug, hence the hard failure.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6104
// All (signed) condition instructions share both their locations setup and
// their code generation through HandleCondition().

void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}
6152
// The unsigned comparisons (Below/BelowOrEqual/Above/AboveOrEqual) also go
// through the shared HandleCondition() path.

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
6184
// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  // Only the switch value needs a register; scratch registers (TMP/AT) are
  // used directly by the code generator.
  locations->SetInAt(0, Location::RequiresRegister());
}
6191
// Emits a packed switch as a cascade of compare/branch pairs, used when the
// number of entries is small enough that a jump table would not pay off.
// The running value in TMP is repeatedly decremented so each case can be
// tested against zero with compact branches.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  // temp_reg = value - lower_bound, i.e. the zero-based case index.
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle the remaining cases two at a time: after subtracting 2, a negative
  // result selects the odd case, zero selects the even case.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    // Skip the branch when the default block immediately follows in layout order.
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
6227
// Emits a packed switch via a jump table of 32-bit offsets (relative to the
// table start), used when the number of entries exceeds the compare/branch
// threshold.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  // TMP = zero-based case index; an unsigned compare also catches index < 0.
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // Scale the index by 4 (each table entry is a 32-bit offset) and add the base.
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  // NOTE(review): nop presumably fills the jr delay slot — confirm against the
  // Mips64Assembler's Jr encoding.
  __ Nop();
}
6258
6259void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6260 int32_t lower_bound = switch_instr->GetStartValue();
6261 uint32_t num_entries = switch_instr->GetNumEntries();
6262 LocationSummary* locations = switch_instr->GetLocations();
6263 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
6264 HBasicBlock* switch_block = switch_instr->GetBlock();
6265 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6266
6267 if (num_entries > kPackedSwitchJumpTableThreshold) {
6268 GenTableBasedPackedSwitch(value_reg,
6269 lower_bound,
6270 num_entries,
6271 switch_block,
6272 default_block);
6273 } else {
6274 GenPackedSwitchWithCompares(value_reg,
6275 lower_bound,
6276 num_entries,
6277 switch_block,
6278 default_block);
6279 }
6280}
6281
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  // The class pointer comes in a register; the loaded method pointer goes out
  // in a register. No calls, no temps needed.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
6288
Chris Larsenc9905a62017-03-13 17:06:18 -07006289void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
6290 LocationSummary* locations = instruction->GetLocations();
6291 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
6292 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
6293 instruction->GetIndex(), kMips64PointerSize).SizeValue();
6294 __ LoadFromOffset(kLoadDoubleword,
6295 locations->Out().AsRegister<GpuRegister>(),
6296 locations->InAt(0).AsRegister<GpuRegister>(),
6297 method_offset);
6298 } else {
6299 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
6300 instruction->GetIndex(), kMips64PointerSize));
6301 __ LoadFromOffset(kLoadDoubleword,
6302 locations->Out().AsRegister<GpuRegister>(),
6303 locations->InAt(0).AsRegister<GpuRegister>(),
6304 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
6305 __ LoadFromOffset(kLoadDoubleword,
6306 locations->Out().AsRegister<GpuRegister>(),
6307 locations->Out().AsRegister<GpuRegister>(),
6308 method_offset);
6309 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006310}
6311
Alexey Frunze4dda3372015-06-01 18:31:49 -07006312} // namespace mips64
6313} // namespace art