blob: 7051ccefdc79cb01d1c76ba053eaaa7146e27d4f [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070028#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "mirror/array-inl.h"
32#include "mirror/class-inl.h"
33#include "offsets.h"
34#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070035#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070036#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070037#include "utils/stack_checks.h"
38
39namespace art {
40namespace mips64 {
41
// Stack offset at which the current ArtMethod* is spilled in the frame (frame slot 0).
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry, per the managed calling convention (A0).
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
49
// Maps a return type to its MIPS64 return location: integral/reference/long values
// come back in V0, floating-point values in F0, and void has no location.
Location Mips64ReturnLocation(DataType::Type return_type) {
  switch (return_type) {
    case DataType::Type::kBool:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
      // All integer-like results (including 64-bit longs and references) use GPR V0.
      return Location::RegisterLocation(V0);

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // Both float and double results use FPU register F0.
      return Location::FpuRegisterLocation(F0);

    case DataType::Type::kVoid:
      // No value to return.
      return Location();
  }
  // Every enumerator is handled above; reaching here means a corrupt type value.
  UNREACHABLE();
}
70
// Return location for managed (dex) calls; delegates to the shared MIPS64 mapping.
Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
  return Mips64ReturnLocation(type);
}
74
// The callee receives its ArtMethod* in A0 (kMethodRegisterArgument).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
78
// Allocates the location for the next managed-call argument of the given `type`.
// MIPS64 allocates GPR and FPU argument registers in lockstep: consuming a register
// of one kind also advances the index of the other kind, so argument position N
// always maps to register slot N regardless of type mix. Once register slots are
// exhausted, arguments go to stack slots.
Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    // Void is not a valid parameter type; LOG(FATAL) aborts.
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    // FP argument in an FPU register; also burn the GPR at the same position (lockstep).
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    // Integer/reference argument in a GPR; also burn the FPU register at that position.
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;
  } else {
    // Register slots exhausted: place the argument on the stack, using a double
    // slot for 64-bit values.
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;

  return next_location;
}
105
// Return location for runtime (quick entrypoint) calls; same mapping as managed calls.
Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
  return Mips64ReturnLocation(type);
}
109
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100110// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
111#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700112#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700113
// Slow path for HBoundsCheck: throws ArrayIndexOutOfBoundsException (or
// StringIndexOutOfBoundsException for String.charAt) via the runtime. The throw
// never returns, so this path is fatal.
class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations (index and length) to the first two runtime-call
    // argument registers. The sources could overlap the destinations, so we need a
    // parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    // String.charAt gets its own entrypoint so the exception message names a string index.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The exception is always thrown; control never returns to the fast path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};
150
151class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
152 public:
Alexey Frunzec61c0762017-04-10 13:54:23 -0700153 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
154 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700155
156 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
157 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
158 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100159 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700160 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
161 }
162
Alexandre Rames8158f282015-08-07 10:26:17 +0100163 bool IsFatal() const OVERRIDE { return true; }
164
Roland Levillain46648892015-06-19 16:07:18 +0100165 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
166
Alexey Frunze4dda3372015-06-01 18:31:49 -0700167 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700168 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
169};
170
// Slow path for HLoadClass/HClinitCheck: resolves (and optionally initializes) a
// class via the runtime. For the kBssEntry load kind it also writes the resolved
// class back into the .bss entry so future fast-path loads hit.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `cls` is the class to load; `at` is the instruction this path services (either
  // the HLoadClass itself or an HClinitCheck); `do_clinit` selects static-storage
  // initialization over plain type resolution. `bss_info_high` is the high half of
  // the PC-relative patch for the BSS entry (Baker/no-read-barrier configs only).
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit,
                          const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    // True for the Baker read-barrier config and for no-read-barrier builds; these
    // configs can store to the BSS entry right after the runtime call (see below).
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<GpuRegister>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the type index in A0 and call the resolution/initialization entrypoint.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      __ Bind(&info_low->label);
      // 0x5678 is a placeholder offset rewritten by the linker at patch time.
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out.AsRegister<GpuRegister>(), TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
277
// Slow path for HLoadString with the kBssEntry load kind: resolves the string via
// the runtime and writes the result back into the .bss entry so future fast-path
// loads hit. Mirrors the BSS handling in LoadClassSlowPathMIPS64.
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `bss_info_high` is the high half of the PC-relative patch for the BSS entry.
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction,
                                    const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS64(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    // This slow path is only used for the BSS-entry load kind.
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    GpuRegister out = locations->Out().AsRegister<GpuRegister>();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    // True for the Baker read-barrier config and for no-read-barrier builds; these
    // configs can store to the BSS entry right after the runtime call (see below).
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the string index in A0 and call the resolution entrypoint.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(),
                                                 string_index,
                                                 bss_info_high_);
      __ Bind(&info_low->label);
      // 0x5678 is a placeholder offset rewritten by the linker at patch time.
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    // Move the resolved string from A0 to the output location.
    DataType::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};
362
363class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
364 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000365 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700366
367 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
368 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
369 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000370 if (instruction_->CanThrowIntoCatchBlock()) {
371 // Live registers will be restored in the catch block if caught.
372 SaveLiveRegisters(codegen, instruction_->GetLocations());
373 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100374 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700375 instruction_,
376 instruction_->GetDexPc(),
377 this);
378 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
379 }
380
Alexandre Rames8158f282015-08-07 10:26:17 +0100381 bool IsFatal() const OVERRIDE { return true; }
382
Roland Levillain46648892015-06-19 16:07:18 +0100383 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
384
Alexey Frunze4dda3372015-06-01 18:31:49 -0700385 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700386 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
387};
388
// Slow path for HSuspendCheck: calls the runtime's test-suspend entrypoint so the
// thread can yield at a safepoint, then branches back either to the return label
// (loop-header style check) or to a designated successor block.
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `successor` may be null; see GetReturnLabel() for that case.
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      // Resume right after the suspend check.
      __ Bc(GetReturnLabel());
    } else {
      // Continue with the designated successor block.
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  // Label to resume at when there is no explicit successor block.
  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};
425
// Slow path shared by HInstanceOf and HCheckCast: calls the corresponding runtime
// entrypoint. For instanceof it moves the boolean-ish result to the output; for
// checkcast the entrypoint throws on failure. `is_fatal` marks checkcast paths
// whose failure cannot be caught in this method (no registers saved/restored).
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      // Only non-fatal paths return to compiled code, so only they need the
      // live registers preserved.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations (object and class) to the first two runtime-call
    // argument registers; sources could overlap destinations, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Move the entrypoint's result into the instruction's output location.
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      // kQuickCheckInstanceOf throws ClassCastException on failure; no result to move.
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether this path always throws (no return to the fast path).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
480
// Slow path for HDeoptimize: transfers execution to the interpreter via the
// runtime's deoptimize entrypoint, passing the deoptimization kind in A0.
class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // Pass the deoptimization kind as the first (and only) argument.
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};
503
// Slow path for HArraySet of an object reference when the store needs a runtime
// type check: calls kQuickAputObject with (array, index, value) as arguments.
class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Move array, index and value to the first three runtime-call argument
    // registers; sources could overlap destinations, so use a parallel move.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};
544
545// Slow path marking an object reference `ref` during a read
546// barrier. The field `obj.field` in the object `obj` holding this
547// reference does not get updated by this slow path after marking (see
548// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
549//
550// This means that after the execution of this slow path, `ref` will
551// always be up-to-date, but `obj.field` may not; i.e., after the
552// flip, `ref` will be a to-space reference, but `obj.field` will
553// probably still be a from-space reference (unless it gets updated by
554// another thread, or if another thread installed another object
555// reference (different from `ref`) in `obj.field`).
556//
557// If `entrypoint` is a valid location it is assumed to already be
558// holding the entrypoint. The case where the entrypoint is passed in
559// is for the GcRoot read barrier.
560class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
561 public:
562 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
563 Location ref,
564 Location entrypoint = Location::NoLocation())
565 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
566 DCHECK(kEmitCompilerReadBarrier);
567 }
568
569 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
570
571 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
572 LocationSummary* locations = instruction_->GetLocations();
573 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
574 DCHECK(locations->CanCall());
575 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
576 DCHECK(instruction_->IsInstanceFieldGet() ||
577 instruction_->IsStaticFieldGet() ||
578 instruction_->IsArrayGet() ||
579 instruction_->IsArraySet() ||
580 instruction_->IsLoadClass() ||
581 instruction_->IsLoadString() ||
582 instruction_->IsInstanceOf() ||
583 instruction_->IsCheckCast() ||
584 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
585 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
586 << "Unexpected instruction in read barrier marking slow path: "
587 << instruction_->DebugName();
588
589 __ Bind(GetEntryLabel());
590 // No need to save live registers; it's taken care of by the
591 // entrypoint. Also, there is no need to update the stack mask,
592 // as this runtime call will not trigger a garbage collection.
593 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
594 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
595 (S2 <= ref_reg && ref_reg <= S7) ||
596 (ref_reg == S8)) << ref_reg;
597 // "Compact" slow path, saving two moves.
598 //
599 // Instead of using the standard runtime calling convention (input
600 // and output in A0 and V0 respectively):
601 //
602 // A0 <- ref
603 // V0 <- ReadBarrierMark(A0)
604 // ref <- V0
605 //
606 // we just use rX (the register containing `ref`) as input and output
607 // of a dedicated entrypoint:
608 //
609 // rX <- ReadBarrierMarkRegX(rX)
610 //
611 if (entrypoint_.IsValid()) {
612 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
613 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
614 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
615 __ Nop();
616 } else {
617 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100618 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800619 // This runtime call does not require a stack map.
620 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
621 instruction_,
622 this);
623 }
624 __ Bc(GetExitLabel());
625 }
626
627 private:
628 // The location (register) of the marked object reference.
629 const Location ref_;
630
631 // The location of the entrypoint if already loaded.
632 const Location entrypoint_;
633
634 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
635};
636
637// Slow path marking an object reference `ref` during a read barrier,
638// and if needed, atomically updating the field `obj.field` in the
639// object `obj` holding this reference after marking (contrary to
640// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
641// `obj.field`).
642//
643// This means that after the execution of this slow path, both `ref`
644// and `obj.field` will be up-to-date; i.e., after the flip, both will
645// hold the same to-space reference (unless another thread installed
646// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `ref` is the location (register) of the reference to mark; `obj` holds
  // the object containing the field; `field_offset` (a register) holds the
  // field's byte offset within `obj`; `temp1` is a scratch register used to
  // preserve the old reference across the marking entrypoint call.
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;      // Pointer to actual memory.
    GpuRegister tmp = AT;           // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Scratch register holding the pre-marking value of `ref_` across the call.
  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};
793
794// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the possibly-updated reference; `ref` is the reference
  // that was loaded; `obj` is the holder object; `offset`/`index` describe
  // where within `obj` the reference was loaded from (`index` is used by
  // HArrayGet and the UnsafeGetObject* intrinsics).
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  // Marshals (ref, obj, offset) into the runtime calling convention and
  // calls the ReadBarrierSlow entrypoint, moving its result into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`,
  // usable as a scratch register inside this slow path.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
979
980// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the possibly-updated root; `root` is the GC root that
  // was loaded (by HLoadClass or HLoadString, see the DCHECK below).
  ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  // Moves `root_` into the first argument register, calls the
  // ReadBarrierForRootSlow entrypoint and moves its result into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 root_,
                                 DataType::Type::kReference);
    mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }

 private:
  // The location receiving the read-barrier result.
  const Location out_;
  // The location of the GC root to pass to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
};
1025
// Constructs the MIPS64 code generator: registers the callee-save masks with
// the base CodeGenerator and arena-allocates the per-compilation containers
// (literal pools, linker-patch lists and JIT patch maps).
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      // Deduplication maps for 32-bit and 64-bit literals.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // PC-relative and .bss-entry linker patch lists for methods, types
      // and strings.
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // Patch maps used when JIT-compiling (keyed by string/type reference).
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1063
1064#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001065// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1066#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001067#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -07001068
// Finalizes code generation: resolves branches in the assembler, then
// rewrites every recorded native pc (stack maps and disassembly intervals)
// to the post-branch-fixup positions before handing off to the base class.
void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips64);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    // Branch fixup can only grow the code, never shrink it.
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}
1099
// Returns the assembler of the owning code generator; parallel moves are
// emitted directly into the main instruction stream.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1103
// Emits the move at position `index` of the resolved move list by delegating
// to the code generator's MoveLocation.
void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}
1108
// Emits a swap of the two locations of the move at position `index`, used to
// break cycles in the parallel move graph.
void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}
1113
// Reloads a previously spilled scratch register from the top of the stack
// and releases the stack slot (the inverse of SpillScratch).
void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}
1119
// Spills a register to a freshly reserved doubleword stack slot so it can be
// used as a scratch register during parallel move resolution.
void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}
1125
// Swaps the contents of two stack slots (`index1`, `index2`), each either a
// single (32-bit) or double (64-bit) slot, using TMP and one extra scratch
// register.
void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  // Load both slots into registers, then store them back crosswise.
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}
1149
1150static dwarf::Reg DWARFReg(GpuRegister reg) {
1151 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
1152}
1153
David Srbeckyba702002016-02-01 18:15:29 +00001154static dwarf::Reg DWARFReg(FpuRegister reg) {
1155 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
1156}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001157
// Emits the method prologue: optional stack-overflow probe, frame
// allocation, callee-save spills (with CFI annotations), storing the current
// ArtMethod* at SP, and initializing the should_deoptimize flag if present.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe below SP by the reserved amount; faults here are attributed to
    // this method via the recorded pc info.
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips64)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core callee-saves are stored at the top of the frame, highest-numbered
  // register first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow directly below the core callee-saves.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1215
// Emits the method epilogue: restores callee-saved registers (mirroring the
// spill order of GenerateFrameEntry), releases the frame and returns through
// RA, keeping the CFI state consistent for code after the return.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return: jump to the address in RA.
  __ Jic(RA, 0);

  // Restore the CFI state so that code emitted after this (non-terminal)
  // epilogue is described by the pre-exit frame layout.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1250
// Binds the assembler label associated with `block` at the current code
// position, making the block a valid branch target.
void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1254
// Emits code to move a value from `source` to `destination`, covering all
// combinations of GPR, FPR, stack slot, SIMD stack slot and constant.
// `dst_type` selects 32-bit vs 64-bit and integer vs FP moves; kVoid means
// "unspecified" and the type is inferred (though the DCHECK below currently
// rules that case out for MIPS64). TMP/AT/FTMP are used as scratch registers.
void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       DataType::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == DataType::Type::kVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
      }
    }
    DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (DataType::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsSIMDStackSlot()) {
      // 128-bit vector load from the stack.
      __ LoadFpuFromOffset(kLoadQuadword,
                           destination.AsFpuRegister<FpuRegister>(),
                           SP,
                           source.GetStackIndex());
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant. The constant is materialized in a GPR
      // (AT if the final destination is an FPR) and then, if needed, moved to
      // the FPR via mtc1/dmtc1. A zero FP constant reuses ZERO directly.
      GpuRegister gpr = AT;
      if (!DataType::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == DataType::Type::kFloat32) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == DataType::Type::kFloat64) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          // With SIMD enabled, copy the full vector register.
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          // Move to FPR from FPR
          if (dst_type == DataType::Type::kFloat32) {
            __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          } else {
            DCHECK_EQ(dst_type, DataType::Type::kFloat64);
            __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          }
        }
      } else {
        DCHECK(destination.IsRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ StoreFpuToOffset(kStoreQuadword,
                          source.AsFpuRegister<FpuRegister>(),
                          SP,
                          destination.GetStackIndex());
    } else {
      // SIMD slot to SIMD slot: stage through FTMP.
      DCHECK(source.IsSIMDStackSlot());
      __ LoadFpuFromOffset(kLoadQuadword,
                           FTMP,
                           SP,
                           source.GetStackIndex());
      __ StoreFpuToOffset(kStoreQuadword,
                          FTMP,
                          SP,
                          destination.GetStackIndex());
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
        } else {
          dst_type =
              destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant. Zero constants are stored directly from
      // the ZERO register; anything else is materialized in TMP first.
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}
1445
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001446void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001447 DCHECK(!loc1.IsConstant());
1448 DCHECK(!loc2.IsConstant());
1449
1450 if (loc1.Equals(loc2)) {
1451 return;
1452 }
1453
1454 bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
1455 bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
1456 bool is_fp_reg1 = loc1.IsFpuRegister();
1457 bool is_fp_reg2 = loc2.IsFpuRegister();
1458
1459 if (loc2.IsRegister() && loc1.IsRegister()) {
1460 // Swap 2 GPRs
1461 GpuRegister r1 = loc1.AsRegister<GpuRegister>();
1462 GpuRegister r2 = loc2.AsRegister<GpuRegister>();
1463 __ Move(TMP, r2);
1464 __ Move(r2, r1);
1465 __ Move(r1, TMP);
1466 } else if (is_fp_reg2 && is_fp_reg1) {
1467 // Swap 2 FPRs
1468 FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
1469 FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001470 if (type == DataType::Type::kFloat32) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001471 __ MovS(FTMP, r1);
1472 __ MovS(r1, r2);
1473 __ MovS(r2, FTMP);
1474 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001475 DCHECK_EQ(type, DataType::Type::kFloat64);
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001476 __ MovD(FTMP, r1);
1477 __ MovD(r1, r2);
1478 __ MovD(r2, FTMP);
1479 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001480 } else if (is_slot1 != is_slot2) {
1481 // Swap GPR/FPR and stack slot
1482 Location reg_loc = is_slot1 ? loc2 : loc1;
1483 Location mem_loc = is_slot1 ? loc1 : loc2;
1484 LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
1485 StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001486 // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001487 __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
1488 if (reg_loc.IsFpuRegister()) {
1489 __ StoreFpuToOffset(store_type,
1490 reg_loc.AsFpuRegister<FpuRegister>(),
1491 SP,
1492 mem_loc.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001493 if (mem_loc.IsStackSlot()) {
1494 __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
1495 } else {
1496 DCHECK(mem_loc.IsDoubleStackSlot());
1497 __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
1498 }
1499 } else {
1500 __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
1501 __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
1502 }
1503 } else if (is_slot1 && is_slot2) {
1504 move_resolver_.Exchange(loc1.GetStackIndex(),
1505 loc2.GetStackIndex(),
1506 loc1.IsDoubleStackSlot());
1507 } else {
1508 LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
1509 }
1510}
1511
Calin Juravle175dc732015-08-25 15:42:32 +01001512void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1513 DCHECK(location.IsRegister());
1514 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1515}
1516
Calin Juravlee460d1d2015-09-29 04:52:17 +01001517void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1518 if (location.IsRegister()) {
1519 locations->AddTemp(location);
1520 } else {
1521 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1522 }
1523}
1524
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001525void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
1526 GpuRegister value,
1527 bool value_can_be_null) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001528 Mips64Label done;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001529 GpuRegister card = AT;
1530 GpuRegister temp = TMP;
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001531 if (value_can_be_null) {
1532 __ Beqzc(value, &done);
1533 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001534 __ LoadFromOffset(kLoadDoubleword,
1535 card,
1536 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -07001537 Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001538 __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
1539 __ Daddu(temp, card, temp);
1540 __ Sb(card, temp, 0);
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001541 if (value_can_be_null) {
1542 __ Bind(&done);
1543 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001544}
1545
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001546template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Alexey Frunze19f6c692016-11-30 19:19:55 -08001547inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
1548 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001549 ArenaVector<linker::LinkerPatch>* linker_patches) {
Alexey Frunze19f6c692016-11-30 19:19:55 -08001550 for (const PcRelativePatchInfo& info : infos) {
1551 const DexFile& dex_file = info.target_dex_file;
1552 size_t offset_or_index = info.offset_or_index;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001553 DCHECK(info.label.IsBound());
1554 uint32_t literal_offset = __ GetLabelLocation(&info.label);
1555 const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
1556 uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
1557 linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
Alexey Frunze19f6c692016-11-30 19:19:55 -08001558 }
1559}
1560
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001561void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Alexey Frunze19f6c692016-11-30 19:19:55 -08001562 DCHECK(linker_patches->empty());
1563 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01001564 pc_relative_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001565 method_bss_entry_patches_.size() +
Alexey Frunzef63f5692016-12-13 17:43:11 -08001566 pc_relative_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01001567 type_bss_entry_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001568 pc_relative_string_patches_.size() +
1569 string_bss_entry_patches_.size();
Alexey Frunze19f6c692016-11-30 19:19:55 -08001570 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01001571 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001572 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
1573 pc_relative_method_patches_, linker_patches);
1574 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
1575 pc_relative_type_patches_, linker_patches);
1576 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
1577 pc_relative_string_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01001578 } else {
1579 DCHECK(pc_relative_method_patches_.empty());
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001580 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
1581 pc_relative_type_patches_, linker_patches);
1582 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
1583 pc_relative_string_patches_, linker_patches);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001584 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01001585 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
1586 method_bss_entry_patches_, linker_patches);
1587 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
1588 type_bss_entry_patches_, linker_patches);
1589 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
1590 string_bss_entry_patches_, linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001591 DCHECK_EQ(size, linker_patches->size());
Alexey Frunzef63f5692016-12-13 17:43:11 -08001592}
1593
Vladimir Marko65979462017-05-19 17:25:12 +01001594CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001595 MethodReference target_method,
1596 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001597 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001598 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001599 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001600 &pc_relative_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001601}
1602
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001603CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001604 MethodReference target_method,
1605 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001606 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001607 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001608 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001609 &method_bss_entry_patches_);
1610}
1611
Alexey Frunzef63f5692016-12-13 17:43:11 -08001612CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001613 const DexFile& dex_file,
1614 dex::TypeIndex type_index,
1615 const PcRelativePatchInfo* info_high) {
1616 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001617}
1618
Vladimir Marko1998cd02017-01-13 13:02:58 +00001619CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001620 const DexFile& dex_file,
1621 dex::TypeIndex type_index,
1622 const PcRelativePatchInfo* info_high) {
1623 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001624}
1625
Vladimir Marko65979462017-05-19 17:25:12 +01001626CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001627 const DexFile& dex_file,
1628 dex::StringIndex string_index,
1629 const PcRelativePatchInfo* info_high) {
1630 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001631}
1632
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001633CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
1634 const DexFile& dex_file,
1635 dex::StringIndex string_index,
1636 const PcRelativePatchInfo* info_high) {
1637 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
1638}
1639
Alexey Frunze19f6c692016-11-30 19:19:55 -08001640CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001641 const DexFile& dex_file,
1642 uint32_t offset_or_index,
1643 const PcRelativePatchInfo* info_high,
1644 ArenaDeque<PcRelativePatchInfo>* patches) {
1645 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001646 return &patches->back();
1647}
1648
Alexey Frunzef63f5692016-12-13 17:43:11 -08001649Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1650 return map->GetOrCreate(
1651 value,
1652 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1653}
1654
Alexey Frunze19f6c692016-11-30 19:19:55 -08001655Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1656 return uint64_literals_.GetOrCreate(
1657 value,
1658 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1659}
1660
Alexey Frunzef63f5692016-12-13 17:43:11 -08001661Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001662 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001663}
1664
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001665void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
1666 GpuRegister out,
1667 PcRelativePatchInfo* info_low) {
1668 DCHECK(!info_high->patch_info_high);
1669 __ Bind(&info_high->label);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001670 // Add the high half of a 32-bit offset to PC.
1671 __ Auipc(out, /* placeholder */ 0x1234);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001672 // A following instruction will add the sign-extended low half of the 32-bit
Alexey Frunzef63f5692016-12-13 17:43:11 -08001673 // offset to `out` (e.g. ld, jialc, daddiu).
Alexey Frunze4147fcc2017-06-17 19:57:27 -07001674 if (info_low != nullptr) {
1675 DCHECK_EQ(info_low->patch_info_high, info_high);
1676 __ Bind(&info_low->label);
1677 }
Alexey Frunze19f6c692016-11-30 19:19:55 -08001678}
1679
Alexey Frunze627c1a02017-01-30 19:28:14 -08001680Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
1681 dex::StringIndex string_index,
1682 Handle<mirror::String> handle) {
1683 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
1684 reinterpret_cast64<uint64_t>(handle.GetReference()));
1685 return jit_string_patches_.GetOrCreate(
1686 StringReference(&dex_file, string_index),
1687 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1688}
1689
// Records `handle` as the JIT GC root for the given class and returns the
// deduplicated placeholder literal; EmitJitRootPatches fills in the actual
// root-table address later.
Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                         dex::TypeIndex type_index,
                                                         Handle<mirror::Class> handle) {
  jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
                             reinterpret_cast64<uint64_t>(handle.GetReference()));
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1699
// Patches one JIT root literal in the generated `code`: writes the 32-bit
// address of slot `index_in_table` of the root table (located at
// `roots_data`) over the placeholder at the literal's offset.
void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const Literal* literal,
                                          uint64_t index_in_table) const {
  uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  // The dchecked cast asserts the root-table address fits in 32 bits.
  reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
}
1709
1710void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1711 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001712 const StringReference& string_reference = entry.first;
1713 Literal* table_entry_literal = entry.second;
1714 const auto it = jit_string_roots_.find(string_reference);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001715 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001716 uint64_t index_in_table = it->second;
1717 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001718 }
1719 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001720 const TypeReference& type_reference = entry.first;
1721 Literal* table_entry_literal = entry.second;
1722 const auto it = jit_class_roots_.find(type_reference);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001723 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001724 uint64_t index_in_table = it->second;
1725 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001726 }
1727}
1728
David Brazdil58282f42016-01-14 12:45:10 +00001729void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001730 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1731 blocked_core_registers_[ZERO] = true;
1732 blocked_core_registers_[K0] = true;
1733 blocked_core_registers_[K1] = true;
1734 blocked_core_registers_[GP] = true;
1735 blocked_core_registers_[SP] = true;
1736 blocked_core_registers_[RA] = true;
1737
Lazar Trsicd9672662015-09-03 17:33:01 +02001738 // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
1739 // registers (similar to how AT is used by MIPS assemblers).
Alexey Frunze4dda3372015-06-01 18:31:49 -07001740 blocked_core_registers_[AT] = true;
1741 blocked_core_registers_[TMP] = true;
Lazar Trsicd9672662015-09-03 17:33:01 +02001742 blocked_core_registers_[TMP2] = true;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001743 blocked_fpu_registers_[FTMP] = true;
1744
1745 // Reserve suspend and thread registers.
1746 blocked_core_registers_[S0] = true;
1747 blocked_core_registers_[TR] = true;
1748
1749 // Reserve T9 for function calls
1750 blocked_core_registers_[T9] = true;
1751
Goran Jakovljevic782be112016-06-21 12:39:04 +02001752 if (GetGraph()->IsDebuggable()) {
1753 // Stubs do not save callee-save floating point registers. If the graph
1754 // is debuggable, we need to deal with these registers differently. For
1755 // now, just block them.
1756 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1757 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1758 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001759 }
1760}
1761
Alexey Frunze4dda3372015-06-01 18:31:49 -07001762size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1763 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001764 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001765}
1766
// Reloads core register `reg_id` from the stack slot at `stack_index`;
// returns the number of bytes consumed (one doubleword).
size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1771
// Spills FPU register `reg_id` to the stack. With SIMD the full 128-bit
// vector register is saved, otherwise only the 64-bit double; the returned
// slot size matches accordingly.
size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
                      FpuRegister(reg_id),
                      SP,
                      stack_index);
  return GetFloatingPointSpillSlotSize();
}
1779
// Reloads FPU register `reg_id` from the stack, mirroring
// SaveFloatingPointRegister (quadword with SIMD, doubleword without).
size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
                       FpuRegister(reg_id),
                       SP,
                       stack_index);
  return GetFloatingPointSpillSlotSize();
}
1787
// Prints the symbolic name of core register `reg` (for debug dumps).
void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}
1791
// Prints the symbolic name of FPU register `reg` (for debug dumps).
void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}
1795
Calin Juravle175dc732015-08-25 15:42:32 +01001796void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
Alexey Frunze4dda3372015-06-01 18:31:49 -07001797 HInstruction* instruction,
1798 uint32_t dex_pc,
1799 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001800 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Alexey Frunze15958152017-02-09 19:08:30 -08001801 GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
Serban Constantinescufc734082016-07-19 17:18:07 +01001802 if (EntrypointRequiresStackMap(entrypoint)) {
1803 RecordPcInfo(instruction, dex_pc, slow_path);
1804 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001805}
1806
Alexey Frunze15958152017-02-09 19:08:30 -08001807void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1808 HInstruction* instruction,
1809 SlowPathCode* slow_path) {
1810 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
1811 GenerateInvokeRuntime(entry_point_offset);
1812}
1813
// Loads the entrypoint from the Thread object at `entry_point_offset` into T9
// (the ABI-mandated call register) and calls it; the Nop fills the jalr delay
// slot.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1819
Alexey Frunze4dda3372015-06-01 18:31:49 -07001820void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
1821 GpuRegister class_reg) {
1822 __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
1823 __ LoadConst32(AT, mirror::Class::kStatusInitialized);
1824 __ Bltc(TMP, AT, slow_path->GetEntryLabel());
Alexey Frunze15958152017-02-09 19:08:30 -08001825 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
1826 __ Sync(0);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001827 __ Bind(slow_path->GetExitLabel());
1828}
1829
// Emits a full memory barrier; the barrier kind is ignored because MIPS64
// SYNC only supports stype 0 here (a full barrier satisfies every kind).
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1833
// Emits a suspend check: tests the thread flags halfword in the Thread
// object and enters the suspend slow path when any flag is set. With a
// `successor` block (back edge), the fast path branches to the successor and
// the slow path is entered by fall-through; without one, the slow path
// returns to the point after the check.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1853
// Instruction visitor that emits MIPS64 code; caches the code generator's
// assembler so visitors can use the __ shorthand.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1859
1860void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1861 DCHECK_EQ(instruction->InputCount(), 2U);
1862 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001863 DataType::Type type = instruction->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001864 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001865 case DataType::Type::kInt32:
1866 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001867 locations->SetInAt(0, Location::RequiresRegister());
1868 HInstruction* right = instruction->InputAt(1);
1869 bool can_use_imm = false;
1870 if (right->IsConstant()) {
1871 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1872 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1873 can_use_imm = IsUint<16>(imm);
1874 } else if (instruction->IsAdd()) {
1875 can_use_imm = IsInt<16>(imm);
1876 } else {
1877 DCHECK(instruction->IsSub());
1878 can_use_imm = IsInt<16>(-imm);
1879 }
1880 }
1881 if (can_use_imm)
1882 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1883 else
1884 locations->SetInAt(1, Location::RequiresRegister());
1885 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1886 }
1887 break;
1888
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001889 case DataType::Type::kFloat32:
1890 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001891 locations->SetInAt(0, Location::RequiresFpuRegister());
1892 locations->SetInAt(1, Location::RequiresFpuRegister());
1893 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1894 break;
1895
1896 default:
1897 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1898 }
1899}
1900
// Emits code for And/Or/Xor/Add/Sub. Integer ops pick the immediate form
// (Andi/Ori/Xori/Addiu/Daddiu) when the register allocator kept the RHS as a
// constant, and the 32-bit vs 64-bit instruction variant based on the type.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == DataType::Type::kInt32) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        // Immediate subtraction is an add of the negation; the locations
        // builder only allowed the constant when -rhs_imm fits 16 bits.
        if (type == DataType::Type::kInt32) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
1988
1989void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001990 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001991
1992 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001993 DataType::Type type = instr->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001994 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001995 case DataType::Type::kInt32:
1996 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001997 locations->SetInAt(0, Location::RequiresRegister());
1998 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001999 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002000 break;
2001 }
2002 default:
2003 LOG(FATAL) << "Unexpected shift type " << type;
2004 }
2005}
2006
// Emits code for Shl/Shr/UShr/Ror. Constant shift amounts are masked to the
// type's legal distance (Java semantics) and selected among the plain,
// 64-bit, and 64-bit ">= 32" instruction encodings (Dsll vs Dsll32 etc.);
// variable amounts use the *v register-shift forms.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Mask the distance like the hardware/Java do: 5 bits for int, 6 for long.
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is a plain move (elided when dst == lhs).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // The immediate field only holds 5 bits; distances 32..63 use the
            // dedicated "32" instruction variants with the excess removed.
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2099
// HAdd shares location logic with the other two-input arithmetic/logic ops.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2103
// HAdd code generation is shared with the other two-input ops.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2107
// HAnd shares location logic with the other two-input arithmetic/logic ops.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2111
// HAnd code generation is shared with the other two-input ops.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2115
// Sets up locations for an array element load. Object-array loads with read
// barriers may call a slow path; Baker read barriers may additionally need a
// temp register unless the corresponding read-barrier thunks are enabled.
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier
                                                       ? LocationSummary::kCallOnSlowPath
                                                       : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // Constant-index loads are lowered like field loads, so the field thunk
    // flag applies to them; variable-index loads follow the array thunk flag.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2152
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002153static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2154 auto null_checker = [codegen, instruction]() {
2155 codegen->MaybeRecordImplicitNullCheck(instruction);
2156 };
2157 return null_checker;
2158}
2159
// Emits code for an array element load. Handles per-type load widths,
// compressed-string char loads (runtime check of the compression bit), and
// reference loads with (Baker or slow-path) read barriers.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  DataType::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case DataType::Type::kBool: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        // TMP = obj + index * 2 via the scaled-add instruction.
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        // Load the count word and extract its low bit: the compression flag.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          // Runtime dispatch: byte load for compressed, halfword otherwise.
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case DataType::Type::kInt32: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      // NOTE(review): type cannot be kReference inside this case, so load_type
      // always resolves to kLoadWord here — looks like residue of a formerly
      // shared case label; left as-is since a doc change must not alter code.
      LoadOperandType load_type =
          (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        bool temp_needed = index.IsConstant()
            ? !kBakerReadBarrierThunksEnableForFields
            : !kBakerReadBarrierThunksEnableForArrays;
        Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          offset,
                                                          temp,
                                                          /* needs_null_check */ false);
        } else {
          codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          index,
                                                          temp,
                                                          /* needs_null_check */ false);
        }
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2384
2385void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
2386 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2387 locations->SetInAt(0, Location::RequiresRegister());
2388 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2389}
2390
// Emits the array length load. The load itself may fault on a null array, so
// the implicit null check is recorded immediately after it.
void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
2403
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002404Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2405 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2406 ? Location::ConstantLocation(instruction->AsConstant())
2407 : Location::RequiresRegister();
2408}
2409
2410Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2411 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2412 // We can store a non-zero float or double constant without first loading it into the FPU,
2413 // but we should only prefer this if the constant has a single use.
2414 if (instruction->IsConstant() &&
2415 (instruction->AsConstant()->IsZeroBitPattern() ||
2416 instruction->GetUses().HasExactlyOneElement())) {
2417 return Location::ConstantLocation(instruction->AsConstant());
2418 // Otherwise fall through and require an FPU register for the constant.
2419 }
2420 return Location::RequiresFpuRegister();
2421}
2422
// Sets up locations for an array element store. A store that may need a
// runtime type check goes through a slow path; stores of object references
// additionally reserve a temp for the write barrier.
void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  // Constant values that can be materialized cheaply at the store site stay
  // as constants; see FpuRegisterOrConstantForStore/RegisterOrZeroConstant.
  if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
    locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
  }
  if (needs_write_barrier) {
    // Temporary register for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
  }
}
2448
// Emits the store for an array element. The element address is formed either
// by folding a constant index into the store's immediate offset (base = the
// array register itself) or by computing obj + scaled index into TMP.
// Reference stores may additionally emit an inline type check backed by an
// ArraySetSlowPathMIPS64 and always mark the GC card for non-null values.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();  // The array object.
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the full offset fits in the store immediate, so the
  // array register is used as the base directly; otherwise TMP will hold
  // obj + (index << scale), computed per-case below.
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kInt8: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        // Byte elements: no scaling needed, plain add.
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt16:
    case DataType::Type::kUint16: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // Dlsa computes base_reg = (index << 1) + obj in a single instruction.
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      if (value_location.IsConstant()) {
        // Just setting null. A constant reference can only be the null constant
        // (checked below), which needs neither a type check nor a write barrier.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null never fails the type check; emit a fast store-and-skip.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] a direct super-class-is-Object check can also prove
          // the store valid without going to the runtime.
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      // No null_checker here: when a slow path exists, the type-check loads
      // above already covered the implicit null check; otherwise it is
      // recorded explicitly just below.
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        // A constant float is stored via its bit pattern from a GPR.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        // A constant double is stored via its bit pattern from a GPR.
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2686
2687void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002688 RegisterSet caller_saves = RegisterSet::Empty();
2689 InvokeRuntimeCallingConvention calling_convention;
2690 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2691 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2692 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002693 locations->SetInAt(0, Location::RequiresRegister());
2694 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002695}
2696
2697void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
2698 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002699 BoundsCheckSlowPathMIPS64* slow_path =
2700 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002701 codegen_->AddSlowPath(slow_path);
2702
2703 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
2704 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
2705
2706 // length is limited by the maximum positive signed 32-bit integer.
2707 // Unsigned comparison of length and index checks for index < 0
2708 // and for length <= index simultaneously.
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002709 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002710}
2711
Alexey Frunze15958152017-02-09 19:08:30 -08002712// Temp is used for read barrier.
2713static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2714 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002715 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002716 (kUseBakerReadBarrier ||
2717 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2718 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2719 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2720 return 1;
2721 }
2722 return 0;
2723}
2724
2725// Extra temp is used for read barrier.
2726static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2727 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2728}
2729
Alexey Frunze4dda3372015-06-01 18:31:49 -07002730void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002731 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2732 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
2733
2734 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
2735 switch (type_check_kind) {
2736 case TypeCheckKind::kExactCheck:
2737 case TypeCheckKind::kAbstractClassCheck:
2738 case TypeCheckKind::kClassHierarchyCheck:
2739 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08002740 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002741 ? LocationSummary::kCallOnSlowPath
2742 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
2743 break;
2744 case TypeCheckKind::kArrayCheck:
2745 case TypeCheckKind::kUnresolvedCheck:
2746 case TypeCheckKind::kInterfaceCheck:
2747 call_kind = LocationSummary::kCallOnSlowPath;
2748 break;
2749 }
2750
2751 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002752 locations->SetInAt(0, Location::RequiresRegister());
2753 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08002754 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002755}
2756
// Emits a checked cast of `obj` (input 0) against class `cls` (input 1).
// Each TypeCheckKind gets a tailored fast path; anything the fast path cannot
// prove falls through to a TypeCheckSlowPathMIPS64 which either throws or
// re-checks via the runtime.
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // Second temp only exists when the read barrier configuration requires it.
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  // Field offsets used by the fast paths below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                           is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null: casting null always succeeds.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      // Each iftable entry spans two references (hence the stride of
      // 2 * kHeapReferenceSize and the decrement of TMP by 2); the class to
      // compare is in the first slot of each entry.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqzc(TMP, slow_path->GetEntryLabel());  // Exhausted the table: not an instance.
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
2943
2944void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
2945 LocationSummary* locations =
2946 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2947 locations->SetInAt(0, Location::RequiresRegister());
2948 if (check->HasUses()) {
2949 locations->SetOut(Location::SameAsFirstInput());
2950 }
2951}
2952
2953void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
2954 // We assume the class is not null.
2955 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
2956 check->GetLoadClass(),
2957 check,
2958 check->GetDexPc(),
2959 true);
2960 codegen_->AddSlowPath(slow_path);
2961 GenerateClassInitializationCheck(slow_path,
2962 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
2963}
2964
2965void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002966 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002967
Alexey Frunze299a9392015-12-08 16:08:02 -08002968 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002969
2970 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002971 case DataType::Type::kBool:
2972 case DataType::Type::kInt8:
2973 case DataType::Type::kInt16:
2974 case DataType::Type::kUint16:
2975 case DataType::Type::kInt32:
2976 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002977 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002978 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002979 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2980 break;
2981
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002982 case DataType::Type::kFloat32:
2983 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08002984 locations->SetInAt(0, Location::RequiresFpuRegister());
2985 locations->SetInAt(1, Location::RequiresFpuRegister());
2986 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002987 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002988
2989 default:
2990 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2991 }
2992}
2993
// Emits code that materializes the three-way comparison result of HCompare
// into the output register.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kInt8:
    case DataType::Type::kInt16:
    case DataType::Type::kUint16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      // A zero constant uses the ZERO register directly; any other constant
      // is materialized into AT first.
      GpuRegister rhs = ZERO;
      if (use_imm) {
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // Branchless result: res = (rhs < lhs) - (lhs < rhs), which is
      // -1, 0 or 1 as required.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal (ordered) operands yield 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      // The bias picks the result for unordered (NaN) operands: with gt bias
      // an unordered comparison produces 1, otherwise -1.
      if (instruction->IsGtBias()) {
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      // Same scheme as kFloat32, using the double-precision compares.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3084
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003085void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003086 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08003087 switch (instruction->InputAt(0)->GetType()) {
3088 default:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003089 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003090 locations->SetInAt(0, Location::RequiresRegister());
3091 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3092 break;
3093
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003094 case DataType::Type::kFloat32:
3095 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003096 locations->SetInAt(0, Location::RequiresFpuRegister());
3097 locations->SetInAt(1, Location::RequiresFpuRegister());
3098 break;
3099 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003100 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003101 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3102 }
3103}
3104
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003105void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003106 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003107 return;
3108 }
3109
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003110 DataType::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003111 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003112 switch (type) {
3113 default:
3114 // Integer case.
3115 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
3116 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003117 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003118 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
3119 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003120 case DataType::Type::kFloat32:
3121 case DataType::Type::kFloat64:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003122 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3123 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003124 }
3125}
3126
Alexey Frunzec857c742015-09-23 15:12:39 -07003127void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3128 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003129 DataType::Type type = instruction->GetResultType();
Alexey Frunzec857c742015-09-23 15:12:39 -07003130
3131 LocationSummary* locations = instruction->GetLocations();
3132 Location second = locations->InAt(1);
3133 DCHECK(second.IsConstant());
3134
3135 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3136 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3137 int64_t imm = Int64FromConstant(second.GetConstant());
3138 DCHECK(imm == 1 || imm == -1);
3139
3140 if (instruction->IsRem()) {
3141 __ Move(out, ZERO);
3142 } else {
3143 if (imm == -1) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003144 if (type == DataType::Type::kInt32) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003145 __ Subu(out, ZERO, dividend);
3146 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003147 DCHECK_EQ(type, DataType::Type::kInt64);
Alexey Frunzec857c742015-09-23 15:12:39 -07003148 __ Dsubu(out, ZERO, dividend);
3149 }
3150 } else if (out != dividend) {
3151 __ Move(out, dividend);
3152 }
3153 }
3154}
3155
// Emits Div/Rem where |divisor| is a power of two (>= 2), using shift-based
// strength reduction. For division, negative dividends are biased by
// (|divisor| - 1) before the arithmetic shift so the quotient rounds toward
// zero; the result is negated when the divisor is negative. For remainder,
// the same bias is applied, the low bits are masked, and the bias removed.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin() maps the most negative value to itself; its bit pattern still
  // yields the correct shift count via CTZ.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? abs_imm - 1 : 0  (rounding bias).
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? abs_imm - 1 : 0  (rounding bias).
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      // 64-bit shifts of >= 32 bits need the *32 instruction forms.
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // Bias the dividend, mask off the low ctz_imm bits, then remove the
        // bias so the remainder has the sign of the dividend.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask too wide for Andi's 16-bit immediate: clear high bits with a
          // shift pair instead.
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        // Bias the dividend, mask off the low ctz_imm bits, then remove the
        // bias so the remainder has the sign of the dividend.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask too wide for Andi's 16-bit immediate: clear high bits with a
          // shift pair instead.
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3259
// Emits Div/Rem by an arbitrary constant (|imm| >= 2, not a power of two)
// using the multiply-by-magic-number technique (Granlund & Montgomery /
// Hacker's Delight): take the high half of dividend * magic, apply a sign
// correction, shift, and round toward zero. The remainder is then computed
// as dividend - quotient * imm.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    // TMP = high 32 bits of dividend * magic.
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correct the product when the signs of imm and magic differ.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // quotient = TMP - (TMP >> 31): add 1 for negative values so the
      // quotient rounds toward zero.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // AT = rounded quotient; remainder = dividend - AT * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // TMP = high 64 bits of dividend * magic.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    // Correct the product when the signs of imm and magic differ.
    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    // 64-bit shifts of >= 32 bits need the *32 instruction forms.
    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // quotient = TMP - (TMP >> 63): add 1 for negative values so the
      // quotient rounds toward zero.
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      // AT = rounded quotient; remainder = dividend - AT * imm.
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3333
3334void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3335 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003336 DataType::Type type = instruction->GetResultType();
3337 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Alexey Frunzec857c742015-09-23 15:12:39 -07003338
3339 LocationSummary* locations = instruction->GetLocations();
3340 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3341 Location second = locations->InAt(1);
3342
3343 if (second.IsConstant()) {
3344 int64_t imm = Int64FromConstant(second.GetConstant());
3345 if (imm == 0) {
3346 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3347 } else if (imm == 1 || imm == -1) {
3348 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003349 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003350 DivRemByPowerOfTwo(instruction);
3351 } else {
3352 DCHECK(imm <= -2 || imm >= 2);
3353 GenerateDivRemWithAnyConstant(instruction);
3354 }
3355 } else {
3356 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3357 GpuRegister divisor = second.AsRegister<GpuRegister>();
3358 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003359 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003360 __ DivR6(out, dividend, divisor);
3361 else
3362 __ Ddiv(out, dividend, divisor);
3363 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003364 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003365 __ ModR6(out, dividend, divisor);
3366 else
3367 __ Dmod(out, dividend, divisor);
3368 }
3369 }
3370}
3371
Alexey Frunze4dda3372015-06-01 18:31:49 -07003372void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3373 LocationSummary* locations =
3374 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3375 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003376 case DataType::Type::kInt32:
3377 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003378 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003379 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003380 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3381 break;
3382
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003383 case DataType::Type::kFloat32:
3384 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003385 locations->SetInAt(0, Location::RequiresFpuRegister());
3386 locations->SetInAt(1, Location::RequiresFpuRegister());
3387 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3388 break;
3389
3390 default:
3391 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3392 }
3393}
3394
3395void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003396 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003397 LocationSummary* locations = instruction->GetLocations();
3398
3399 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003400 case DataType::Type::kInt32:
3401 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07003402 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003403 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003404 case DataType::Type::kFloat32:
3405 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003406 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3407 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3408 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003409 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07003410 __ DivS(dst, lhs, rhs);
3411 else
3412 __ DivD(dst, lhs, rhs);
3413 break;
3414 }
3415 default:
3416 LOG(FATAL) << "Unexpected div type " << type;
3417 }
3418}
3419
3420void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003421 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003422 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003423}
3424
void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // Registers a slow path that throws when the divisor is zero, then emits
  // either a runtime zero test or (for constants) resolves the check now.
  SlowPathCodeMIPS64* div_zero_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(div_zero_path);
  Location divisor = instruction->GetLocations()->InAt(0);

  DataType::Type type = instruction->GetType();

  if (!DataType::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (!divisor.IsConstant()) {
    // Runtime check: branch to the slow path when the divisor register holds zero.
    __ Beqzc(divisor.AsRegister<GpuRegister>(), div_zero_path->GetEntryLabel());
  } else if (codegen_->GetInt64ValueOf(divisor.GetConstant()->AsConstant()) == 0) {
    // A constant zero divisor always throws; branch unconditionally.
    __ Bc(div_zero_path->GetEntryLabel());
  }
  // A non-zero constant divisor is always valid: no check needed, fall through.
}
3450
3451void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3452 LocationSummary* locations =
3453 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3454 locations->SetOut(Location::ConstantLocation(constant));
3455}
3456
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // No code here: the constant's location is a ConstantLocation, so each user
  // materializes the value at its own use site.
}
3460
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  // HExit consumes and produces nothing; no LocationSummary is needed.
  exit->SetLocations(nullptr);
}
3464
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // HExit generates no code.
}
3467
3468void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3469 LocationSummary* locations =
3470 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3471 locations->SetOut(Location::ConstantLocation(constant));
3472}
3473
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // No code here: the constant's location is a ConstantLocation, so each user
  // materializes the value at its own use site.
}
3477
David Brazdilfc6a86a2015-06-26 10:33:45 +00003478void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003479 DCHECK(!successor->IsExitBlock());
3480 HBasicBlock* block = got->GetBlock();
3481 HInstruction* previous = got->GetPrevious();
3482 HLoopInformation* info = block->GetLoopInformation();
3483
3484 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3485 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3486 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3487 return;
3488 }
3489 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3490 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3491 }
3492 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003493 __ Bc(codegen_->GetLabelOf(successor));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003494 }
3495}
3496
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  // An unconditional jump has no inputs or outputs.
  got->SetLocations(nullptr);
}
3500
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  // Delegate to the common goto handler with the single successor block.
  HandleGoto(got, got->GetSuccessor());
}
3504
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // A try boundary is a pure control-flow marker; it uses no registers.
  try_boundary->SetLocations(nullptr);
}
3508
3509void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3510 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3511 if (!successor->IsExitBlock()) {
3512 HandleGoto(try_boundary, successor);
3513 }
3514}
3515
// Materializes the int/long comparison `lhs cond rhs` into the output GPR as
// 0 or 1. The RHS may be a constant; constants that fit the 16-bit immediate
// forms are folded into addiu/xori/slti/sltiu, otherwise the constant is
// loaded into TMP and the register form is used.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Used for LE/GT and BE/A: lhs <= rhs  <=>  lhs < rhs + 1.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // Compute a value that is zero iff lhs == rhs (via subtraction or XOR),
      // then normalize it to 0/1.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3671
// Like GenerateIntLongCompare, but leaves the raw comparison value in `dst`
// without normalizing it to 0/1. Returns true when `dst` ends up holding the
// NEGATION of `cond` — e.g. for kCondEQ the emitted XOR/subtraction is
// non-zero iff the operands differ, so the caller must invert its
// interpretation of zero/non-zero.
bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
                                                               bool is64bit,
                                                               LocationSummary* input_locations,
                                                               GpuRegister dst) {
  GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = input_locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Used for LE/GT and BE/A: lhs <= rhs  <=>  lhs < rhs + 1.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // dst is zero iff lhs == rhs; the 0/1 normalization is left to the caller.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (is64bit) {
          __ Daddiu(dst, lhs, -rhs_imm);
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
        }
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
3779
// Emits a fused compare-and-branch to `label` for an int/long condition.
// When the RHS is the constant 0 the single-register R6 zero-compare branches
// are used; otherwise the constant (if any) is loaded into TMP and the
// two-register compact branch forms are emitted.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    // Comparisons against zero: the unsigned ones degenerate to constants or
    // zero/non-zero tests.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    // LE/GT and BE/A are emitted by swapping the operands of GE/LT (BGEUC/BLTUC).
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
3866
// Materializes a float/double comparison into the output GPR as 0 or 1.
// `gt_bias` selects how unordered (NaN) operands behave: with gt_bias the
// ordered CMP variants are used for LT/LE (NaN compares false, i.e. "greater"
// wins); without it the unordered variants (CMP.ULT/ULE) make NaN compare
// true ("less" wins). The R6 CMP.cond.fmt instructions write an all-ones
// (true) or all-zeros (false) mask into FTMP, which is then moved to the GPR
// and reduced to a single bit.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       DataType::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        // CMP.EQ yields -1 (all ones) or 0; adding 1 maps -1 -> 0 and
        // 0 -> 1, producing the negated (NE) result directly.
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        // GT/GE are emitted as LT/LE with swapped operands; the ordered /
        // unordered choice flips accordingly.
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        // See the kFloat32 kCondNE case: +1 negates the -1/0 mask.
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
3981
// Emits the FP comparison into the FPU register `dst` as an all-ones/all-zeros
// mask, without transferring it to a GPR. Returns true when `dst` holds the
// NEGATION of `cond` — only for kCondNE, which reuses CMP.EQ and leaves the
// inversion to the caller. `gt_bias` selects ordered vs. unordered compare
// variants for NaN handling, as in GenerateFpCompare.
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          DataType::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // GT/GE use LT/LE with swapped operands; ordered/unordered flips.
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4072
Alexey Frunze299a9392015-12-08 16:08:02 -08004073void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
4074 bool gt_bias,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004075 DataType::Type type,
Alexey Frunze299a9392015-12-08 16:08:02 -08004076 LocationSummary* locations,
4077 Mips64Label* label) {
4078 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
4079 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004080 if (type == DataType::Type::kFloat32) {
Alexey Frunze299a9392015-12-08 16:08:02 -08004081 switch (cond) {
4082 case kCondEQ:
4083 __ CmpEqS(FTMP, lhs, rhs);
4084 __ Bc1nez(FTMP, label);
4085 break;
4086 case kCondNE:
4087 __ CmpEqS(FTMP, lhs, rhs);
4088 __ Bc1eqz(FTMP, label);
4089 break;
4090 case kCondLT:
4091 if (gt_bias) {
4092 __ CmpLtS(FTMP, lhs, rhs);
4093 } else {
4094 __ CmpUltS(FTMP, lhs, rhs);
4095 }
4096 __ Bc1nez(FTMP, label);
4097 break;
4098 case kCondLE:
4099 if (gt_bias) {
4100 __ CmpLeS(FTMP, lhs, rhs);
4101 } else {
4102 __ CmpUleS(FTMP, lhs, rhs);
4103 }
4104 __ Bc1nez(FTMP, label);
4105 break;
4106 case kCondGT:
4107 if (gt_bias) {
4108 __ CmpUltS(FTMP, rhs, lhs);
4109 } else {
4110 __ CmpLtS(FTMP, rhs, lhs);
4111 }
4112 __ Bc1nez(FTMP, label);
4113 break;
4114 case kCondGE:
4115 if (gt_bias) {
4116 __ CmpUleS(FTMP, rhs, lhs);
4117 } else {
4118 __ CmpLeS(FTMP, rhs, lhs);
4119 }
4120 __ Bc1nez(FTMP, label);
4121 break;
4122 default:
4123 LOG(FATAL) << "Unexpected non-floating-point condition";
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004124 UNREACHABLE();
Alexey Frunze299a9392015-12-08 16:08:02 -08004125 }
4126 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004127 DCHECK_EQ(type, DataType::Type::kFloat64);
Alexey Frunze299a9392015-12-08 16:08:02 -08004128 switch (cond) {
4129 case kCondEQ:
4130 __ CmpEqD(FTMP, lhs, rhs);
4131 __ Bc1nez(FTMP, label);
4132 break;
4133 case kCondNE:
4134 __ CmpEqD(FTMP, lhs, rhs);
4135 __ Bc1eqz(FTMP, label);
4136 break;
4137 case kCondLT:
4138 if (gt_bias) {
4139 __ CmpLtD(FTMP, lhs, rhs);
4140 } else {
4141 __ CmpUltD(FTMP, lhs, rhs);
4142 }
4143 __ Bc1nez(FTMP, label);
4144 break;
4145 case kCondLE:
4146 if (gt_bias) {
4147 __ CmpLeD(FTMP, lhs, rhs);
4148 } else {
4149 __ CmpUleD(FTMP, lhs, rhs);
4150 }
4151 __ Bc1nez(FTMP, label);
4152 break;
4153 case kCondGT:
4154 if (gt_bias) {
4155 __ CmpUltD(FTMP, rhs, lhs);
4156 } else {
4157 __ CmpLtD(FTMP, rhs, lhs);
4158 }
4159 __ Bc1nez(FTMP, label);
4160 break;
4161 case kCondGE:
4162 if (gt_bias) {
4163 __ CmpUleD(FTMP, rhs, lhs);
4164 } else {
4165 __ CmpLeD(FTMP, rhs, lhs);
4166 }
4167 __ Bc1nez(FTMP, label);
4168 break;
4169 default:
4170 LOG(FATAL) << "Unexpected non-floating-point condition";
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004171 UNREACHABLE();
Alexey Frunze299a9392015-12-08 16:08:02 -08004172 }
4173 }
4174}
4175
Alexey Frunze4dda3372015-06-01 18:31:49 -07004176void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00004177 size_t condition_input_index,
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004178 Mips64Label* true_target,
4179 Mips64Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00004180 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004181
David Brazdil0debae72015-11-12 18:37:00 +00004182 if (true_target == nullptr && false_target == nullptr) {
4183 // Nothing to do. The code always falls through.
4184 return;
4185 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00004186 // Constant condition, statically compared against "true" (integer value 1).
4187 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00004188 if (true_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004189 __ Bc(true_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004190 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004191 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00004192 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00004193 if (false_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004194 __ Bc(false_target);
David Brazdil0debae72015-11-12 18:37:00 +00004195 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004196 }
David Brazdil0debae72015-11-12 18:37:00 +00004197 return;
4198 }
4199
4200 // The following code generates these patterns:
4201 // (1) true_target == nullptr && false_target != nullptr
4202 // - opposite condition true => branch to false_target
4203 // (2) true_target != nullptr && false_target == nullptr
4204 // - condition true => branch to true_target
4205 // (3) true_target != nullptr && false_target != nullptr
4206 // - condition true => branch to true_target
4207 // - branch to false_target
4208 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004209 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00004210 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004211 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00004212 if (true_target == nullptr) {
4213 __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
4214 } else {
4215 __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
4216 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004217 } else {
4218 // The condition instruction has not been materialized, use its inputs as
4219 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00004220 HCondition* condition = cond->AsCondition();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004221 DataType::Type type = condition->InputAt(0)->GetType();
Alexey Frunze299a9392015-12-08 16:08:02 -08004222 LocationSummary* locations = cond->GetLocations();
4223 IfCondition if_cond = condition->GetCondition();
4224 Mips64Label* branch_target = true_target;
David Brazdil0debae72015-11-12 18:37:00 +00004225
David Brazdil0debae72015-11-12 18:37:00 +00004226 if (true_target == nullptr) {
4227 if_cond = condition->GetOppositeCondition();
Alexey Frunze299a9392015-12-08 16:08:02 -08004228 branch_target = false_target;
David Brazdil0debae72015-11-12 18:37:00 +00004229 }
4230
Alexey Frunze299a9392015-12-08 16:08:02 -08004231 switch (type) {
4232 default:
4233 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
4234 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004235 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08004236 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
4237 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004238 case DataType::Type::kFloat32:
4239 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08004240 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
4241 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07004242 }
4243 }
David Brazdil0debae72015-11-12 18:37:00 +00004244
4245 // If neither branch falls through (case 3), the conditional branch to `true_target`
4246 // was already emitted (case 2) and we need to emit a jump to `false_target`.
4247 if (true_target != nullptr && false_target != nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004248 __ Bc(false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004249 }
4250}
4251
4252void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
4253 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004254 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004255 locations->SetInAt(0, Location::RequiresRegister());
4256 }
4257}
4258
4259void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004260 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4261 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004262 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004263 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004264 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004265 nullptr : codegen_->GetLabelOf(false_successor);
4266 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004267}
4268
4269void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
4270 LocationSummary* locations = new (GetGraph()->GetArena())
4271 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01004272 InvokeRuntimeCallingConvention calling_convention;
4273 RegisterSet caller_saves = RegisterSet::Empty();
4274 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4275 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00004276 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004277 locations->SetInAt(0, Location::RequiresRegister());
4278 }
4279}
4280
4281void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08004282 SlowPathCodeMIPS64* slow_path =
4283 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00004284 GenerateTestAndBranch(deoptimize,
4285 /* condition_input_index */ 0,
4286 slow_path->GetEntryLabel(),
4287 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004288}
4289
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditonal
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  // NOTE: `condition` is only dereferenced on the unmaterialized path below.
  HCondition* condition = cond->AsCondition();

  DataType::Type cond_type =
      materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
  DataType::Type dst_type = select->GetType();

  // A zero-bit-pattern constant operand can be folded into seleqz/selnez,
  // which substitute the zero register for the suppressed source.
  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // Constant conditions never use conditional moves: the block below is
  // skipped, `can_move_conditionally` stays false, and the generic
  // branch-and-move lowering is used instead.
  if (!cond->IsConstant()) {
    if (!DataType::IsFloatingPointType(cond_type)) {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  // At most one of the two operands may be folded as a constant, and only
  // when a conditional move is actually possible.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  // Second task: record the input/output constraints for the HSelect.
  if (locations_to_set != nullptr) {
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      // Branch-and-move lowering overwrites the false value in place.
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}
4437
4438
// Emits the conditional-move sequence for an HSelect whose feasibility and
// register constraints were established by CanMoveConditionally() above.
// The condition is either read from a register (materialized) or
// materialized here into TMP/FTMP via the Materialize* helpers, whose return
// value (`cond_inverted`) flips which source each sel* instruction keeps.
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;    // Default holder for an integer condition.
  FpuRegister fcond_reg = FTMP;  // Default holder for an FP condition.
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Already materialized: the condition value is simply input 2.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    // Materialize the condition now, into cond_reg or fcond_reg depending
    // on the type of the compared operands.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // CanMoveConditionally() only allows constant operands that are a zero
  // bit pattern (folded into seleqz/selnez below).
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // All non-floating-point destination types.
      if (DataType::IsFloatingPointType(cond_type)) {
        // Move the FP condition into a GPR for the integer sel* forms.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        // True value is zero: keep the false value only when the condition
        // (possibly inverted) is false.
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False value is zero: keep the true value only when the condition holds.
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        // General case: select each source into AT/TMP and OR them together.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // SelS picks into fcond_reg itself; copy the result to the output.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4590
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004591void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4592 LocationSummary* locations = new (GetGraph()->GetArena())
4593 LocationSummary(flag, LocationSummary::kNoCall);
4594 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004595}
4596
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004597void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4598 __ LoadFromOffset(kLoadWord,
4599 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4600 SP,
4601 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004602}
4603
David Brazdil74eb1b22015-12-14 11:44:01 +00004604void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
4605 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004606 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004607}
4608
4609void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004610 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4611 GenConditionalMove(select);
4612 } else {
4613 LocationSummary* locations = select->GetLocations();
4614 Mips64Label false_target;
4615 GenerateTestAndBranch(select,
4616 /* condition_input_index */ 2,
4617 /* true_target */ nullptr,
4618 &false_target);
4619 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4620 __ Bind(&false_target);
4621 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004622}
4623
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // Allocates an empty LocationSummary: the instruction has no inputs,
  // outputs or temps.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
4627
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally empty:
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
4631
void CodeGeneratorMIPS64::GenerateNop() {
  // Emits a single no-op instruction.
  __ Nop();
}
4635
Alexey Frunze4dda3372015-06-01 18:31:49 -07004636void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08004637 const FieldInfo& field_info) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004638 DataType::Type field_type = field_info.GetFieldType();
Alexey Frunze15958152017-02-09 19:08:30 -08004639 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004640 kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
Alexey Frunze15958152017-02-09 19:08:30 -08004641 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
4642 instruction,
4643 object_field_get_with_read_barrier
4644 ? LocationSummary::kCallOnSlowPath
4645 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07004646 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4647 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
4648 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004649 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004650 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004651 locations->SetOut(Location::RequiresFpuRegister());
4652 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08004653 // The output overlaps in the case of an object field get with
4654 // read barriers enabled: we do not want the move to overwrite the
4655 // object's location, as we need it to emit the read barrier.
4656 locations->SetOut(Location::RequiresRegister(),
4657 object_field_get_with_read_barrier
4658 ? Location::kOutputOverlap
4659 : Location::kNoOutputOverlap);
4660 }
4661 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4662 // We need a temporary register for the read barrier marking slow
4663 // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004664 if (!kBakerReadBarrierThunksEnableForFields) {
4665 locations->AddTemp(Location::RequiresRegister());
4666 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004667 }
4668}
4669
// Emits the load for an instance/static field get: picks the load width from
// the field type, performs the load (with implicit null check where possible),
// applies read barriers for reference fields, and inserts acquire barriers
// for volatile fields.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  // Folds the null check into the load instruction when allowed.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness matching the field type.
  switch (type) {
    case DataType::Type::kBool:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // Heap references are 32-bit, zero-extended.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With thunks enabled no temp was reserved (see HandleFieldGet above
        // in LocationsBuilderMIPS64).
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers for volatile reference fields were already emitted on
  // the reference paths above; emit the acquire barrier for all other types.
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4754
4755void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4756 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4757 LocationSummary* locations =
4758 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4759 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004760 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004761 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004762 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004763 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004764 }
4765}
4766
// Emits the store for an instance/static field set. Picks the store width
// from the field type, wraps volatile stores in AnyStore/AnyAny barriers,
// poisons reference values when heap poisoning is enabled, and marks the
// GC card when the stored value needs a write barrier.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  // Folds the null check into the store itself when implicit null checks apply.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kInt16:
    case DataType::Type::kUint16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Volatile store: release-style barrier before the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    // Constant values (including null references) are materialized via TMP.
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  // Record the stored-into card for the concurrent GC when a reference
  // was written.
  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Volatile store: trailing full barrier.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4841
// Instance field get: delegate to the common field-get locations builder.
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4845
// Instance field get: delegate to the common field-get code generator.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4849
// Instance field set: delegate to the common field-set locations builder.
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4853
// Instance field set: delegate to the common field-set code generator,
// propagating whether the stored value may be null (affects card marking).
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4857
// Loads a heap reference `out = *(out + offset)`, reusing (clobbering) the
// `out` register as the base. Depending on `read_barrier_option`, emits a
// Baker fast-path barrier, a slow-path barrier (which needs `maybe_temp` to
// preserve the original base object), or a plain load with unpoisoning.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    // A temp register is only needed when the field thunks cannot be used.
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
4896
// Loads a heap reference `out = *(obj + offset)` with distinct destination
// and base registers (the base object stays live in `obj`, so no copy into
// a temp is needed for the slow-path read barrier, unlike the one-register
// variant above).
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // A temp register is only needed when the field thunks cannot be used.
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
4933
// Maps a GPU register to its slot index in the Baker read barrier mark
// introspection thunk table. Only reference-holding registers get a slot:
// V0..T2, S2..S7 and S8, for BAKER_MARK_INTROSPECTION_REGISTER_COUNT (20)
// entries in total.
static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
  static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
  if (reg >= V0 && reg <= T2) {  // 13 consecutive regs.
    return reg - V0;
  } else if (reg >= S2 && reg <= S7) {  // 6 consecutive regs.
    return 13 + (reg - S2);
  } else if (reg == S8) {  // One more.
    return 19;
  }
  LOG(FATAL) << "Unexpected register " << reg;
  UNREACHABLE();
}
4946
// Returns the byte displacement of the field/array mark thunk for `reg`.
// Thunks for short (16-bit) offsets live in a second bank of
// BAKER_MARK_INTROSPECTION_REGISTER_COUNT entries after the long-offset bank.
static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
  int num = GetBakerMarkThunkNumber(reg) +
      (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
  return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
}
4952
// Returns the byte displacement of the GC-root mark thunk for `reg`,
// relative to the introspection entrypoint.
static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
  return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
      BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
}
4957
// Loads a GC root `root = *(obj + offset)`, optionally through a read
// barrier. With Baker read barriers, emits either an introspection-thunk
// fast path (entrypoint loaded into T9; a null entrypoint means GC is not
// marking) or a ReadBarrierMarkSlowPathMIPS64-based sequence; for non-Baker
// barriers it delegates to GenerateReadBarrierForRootSlow. `label_low`, when
// non-null, is bound on the load instruction itself — presumably so a
// PC-relative patch can target it; TODO confirm against the patching code.
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    // NOTE(review): 0x5678 looks like the placeholder low half of a patched
    // PC-relative address — confirm against the literal/patch emission code.
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        //     temp = Thread::Current()->pReadBarrierMarkReg00
        //     // AKA &art_quick_read_barrier_mark_introspection.
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     if (temp != nullptr) {
        //        temp = &gc_root_thunk<root_reg>
        //        root = temp(root)
        //     }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          DCHECK(!label_low);
          // Fold the high half of the offset into the adjusted base (TMP).
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        // Bare branch: the reference load below sits in its delay slot, so
        // the load executes whether or not the thunk is called.
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        //     if (temp != null) {
        //       root = temp(root)
        //     }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        // A non-null entrypoint means GC is marking: take the mark slow path.
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5082
// Baker read barrier fast path for a field load `ref = *(obj + offset)`.
// With introspection thunks enabled, emits the inline thunk-call sequence
// and returns; otherwise falls through to the generic
// GenerateReferenceLoadWithBakerReadBarrier (no index, scale TIMES_1).
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // If the offset is too large to fit into the lw instruction, we
    //     // use an adjusted base register (TMP) here. This register
    //     // receives bits 16 ... 31 of the offset before the thunk invocation
    //     // and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may or may not have been a null check if the field offset is
    // smaller than the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch (no delay slot); keep the forbidden slot empty.
      __ Beqzc(T9, &skip_call, /* is_bare */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      // Delay-slot branch so the base adjustment executes on both paths.
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5172
// Baker read barrier fast path for an array element load
// `ref = *(obj + data_offset + index * 4)`. With introspection thunks
// enabled, emits the inline thunk-call sequence and returns; otherwise
// delegates to the generic GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // References are 32-bit (compressed), hence the 4-byte element scale.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    // Delay-slot branch: the element-address computation executes on both paths.
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5248
// Generic (non-thunk) Baker read barrier reference load: loads
// obj->monitor_ first, emits a sync barrier to order the two loads,
// performs the reference load (optionally indexed/scaled), unpoisons it,
// and branches to a mark slow path when the lock word's read barrier state
// bit indicates the holder is gray. `always_update_field` selects the
// field-updating slow path used by the UnsafeCASObject intrinsic.
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      if (scale_factor == TIMES_1) {
        // Dlsa does not support a zero shift; use a plain add instead.
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5354
// Emits an unconditional slow-path read barrier for a reference already
// loaded into `out` from `obj` at `offset` (plus optional `index`).
void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  // Unconditionally take the slow path; it branches back to the exit label.
  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5381
5382void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5383 Location out,
5384 Location ref,
5385 Location obj,
5386 uint32_t offset,
5387 Location index) {
5388 if (kEmitCompilerReadBarrier) {
5389 // Baker's read barriers shall be handled by the fast path
5390 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5391 DCHECK(!kUseBakerReadBarrier);
5392 // If heap poisoning is enabled, unpoisoning will be taken care of
5393 // by the runtime within the slow path.
5394 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5395 } else if (kPoisonHeapReferences) {
5396 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5397 }
5398}
5399
5400void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5401 Location out,
5402 Location root) {
5403 DCHECK(kEmitCompilerReadBarrier);
5404
5405 // Insert a slow path based read barrier *after* the GC root load.
5406 //
5407 // Note that GC roots are not affected by heap poisoning, so we do
5408 // not need to do anything special for this here.
5409 SlowPathCodeMIPS64* slow_path =
5410 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
5411 AddSlowPath(slow_path);
5412
5413 __ Bc(slow_path->GetEntryLabel());
5414 __ Bind(slow_path->GetExitLabel());
5415}
5416
Alexey Frunze4dda3372015-06-01 18:31:49 -07005417void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005418 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5419 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005420 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005421 switch (type_check_kind) {
5422 case TypeCheckKind::kExactCheck:
5423 case TypeCheckKind::kAbstractClassCheck:
5424 case TypeCheckKind::kClassHierarchyCheck:
5425 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08005426 call_kind =
5427 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07005428 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005429 break;
5430 case TypeCheckKind::kArrayCheck:
5431 case TypeCheckKind::kUnresolvedCheck:
5432 case TypeCheckKind::kInterfaceCheck:
5433 call_kind = LocationSummary::kCallOnSlowPath;
5434 break;
5435 }
5436
Alexey Frunze4dda3372015-06-01 18:31:49 -07005437 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005438 if (baker_read_barrier_slow_path) {
5439 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5440 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005441 locations->SetInAt(0, Location::RequiresRegister());
5442 locations->SetInAt(1, Location::RequiresRegister());
5443 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005444 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005445 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005446 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005447}
5448
// Emits the `instanceof` check: `out` is set to 1 if `obj` is an instance of
// `cls`, 0 otherwise. Simple kinds are checked inline (with read barriers on
// the class loads when configured); kArrayCheck retries via
// TypeCheckSlowPathMIPS64 on inline failure, and the unresolved/interface
// kinds always go to that slow path.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  // At most one temp is used, and only by check kinds that need it for the
  // read barrier (see NumberOfInstanceOfTemps).
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) == 0, i.e. 1 iff the classes are identical.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = 1 iff the component type is a reference type (kPrimNot == 0).
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                      /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  // Slow paths (when present) branch back to their exit label after setting
  // the result.
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5622
5623void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
5624 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
5625 locations->SetOut(Location::ConstantLocation(constant));
5626}
5627
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
}
5631
5632void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
5633 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
5634 locations->SetOut(Location::ConstantLocation(constant));
5635}
5636
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
}
5640
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  // Locations are therefore assigned exactly as for a regular invoke.
  HandleInvoke(invoke);
}
5647
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Delegate to the architecture-independent helper, which emits the call to
  // the unresolved-invoke trampoline.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
5651
Alexey Frunze4dda3372015-06-01 18:31:49 -07005652void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5653 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5654 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5655}
5656
5657void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
5658 HandleInvoke(invoke);
5659 // The register T0 is required to be used for the hidden argument in
5660 // art_quick_imt_conflict_trampoline, so add the hidden argument.
5661 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
5662}
5663
// Emits an interface call: loads the receiver's class, indexes its IMT by the
// invoke's IMT index, and calls the resolved ArtMethod's quick entry point,
// passing the dex method index as the hidden argument for conflict resolution.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument (temp 1 was reserved as T0 by the locations
  // builder) to the dex method index.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->imt_ (the class's IMT pointer).
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9(); the Nop fills the JALR delay slot.
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5705
5706void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005707 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5708 if (intrinsic.TryDispatch(invoke)) {
5709 return;
5710 }
5711
Alexey Frunze4dda3372015-06-01 18:31:49 -07005712 HandleInvoke(invoke);
5713}
5714
5715void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005716 // Explicit clinit checks triggered by static invokes must have been pruned by
5717 // art::PrepareForRegisterAllocation.
5718 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005719
Chris Larsen3039e382015-08-26 07:54:08 -07005720 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5721 if (intrinsic.TryDispatch(invoke)) {
5722 return;
5723 }
5724
Alexey Frunze4dda3372015-06-01 18:31:49 -07005725 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005726}
5727
void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Polymorphic invokes use the regular invoke location summary; the actual
  // dispatch is performed by the runtime (see the code generator below).
  HandleInvoke(invoke);
}
5731
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Delegate to the architecture-independent helper, which emits the runtime
  // call implementing MethodHandle-polymorphic dispatch.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
5735
Chris Larsen3039e382015-08-26 07:54:08 -07005736static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005737 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005738 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5739 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005740 return true;
5741 }
5742 return false;
5743}
5744
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005745HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005746 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005747 bool fallback_load = false;
5748 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005749 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005750 case HLoadString::LoadKind::kBootImageInternTable:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005751 case HLoadString::LoadKind::kBssEntry:
5752 DCHECK(!Runtime::Current()->UseJitCompilation());
5753 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005754 case HLoadString::LoadKind::kJitTableAddress:
5755 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005756 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005757 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005758 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01005759 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005760 }
5761 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005762 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005763 }
5764 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005765}
5766
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005767HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
5768 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005769 bool fallback_load = false;
5770 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005771 case HLoadClass::LoadKind::kInvalid:
5772 LOG(FATAL) << "UNREACHABLE";
5773 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005774 case HLoadClass::LoadKind::kReferrersClass:
5775 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005776 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005777 case HLoadClass::LoadKind::kBootImageClassTable:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005778 case HLoadClass::LoadKind::kBssEntry:
5779 DCHECK(!Runtime::Current()->UseJitCompilation());
5780 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005781 case HLoadClass::LoadKind::kJitTableAddress:
5782 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005783 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005784 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005785 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005786 break;
5787 }
5788 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005789 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005790 }
5791 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005792}
5793
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types, so the desired dispatch info is
  // returned unchanged.
  return desired_dispatch_info;
}
5800
// Emits a static or direct call: first materializes the callee ArtMethod (or
// entry point) per the invoke's MethodLoadKind, then performs the call per
// its CodePtrLocation, recording the PC for stack maps.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already in its input location.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Emit a high/low pair of PC-relative placeholders; the linker patches
      // in the boot-image method address (0x5678 is the low-half placeholder).
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it from the
      // literal pool (deduplicated across the method).
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from its .bss entry via a patched PC-relative
      // high/low pair.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch-and-link straight to the frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9(); the Nop fills the JALR delay slot.
      __ Jalr(T9);
      __ Nop();
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
5872
5873void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005874 // Explicit clinit checks triggered by static invokes must have been pruned by
5875 // art::PrepareForRegisterAllocation.
5876 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005877
5878 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5879 return;
5880 }
5881
5882 LocationSummary* locations = invoke->GetLocations();
5883 codegen_->GenerateStaticOrDirectCall(invoke,
5884 locations->HasTemps()
5885 ? locations->GetTemp(0)
5886 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005887}
5888
// Emits a virtual call: loads the receiver's class, indexes the embedded
// vtable by the invoke's vtable index, and calls the resolved ArtMethod's
// quick entry point, recording the PC for stack maps.
void CodeGeneratorMIPS64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  // This load doubles as the implicit null check of the receiver.
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9(); the Nop fills the JALR delay slot.
  __ Jalr(T9);
  __ Nop();
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
5924
5925void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
5926 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5927 return;
5928 }
5929
5930 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005931 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005932}
5933
// Assigns locations for HLoadClass. The kRuntimeCall kind becomes a full
// runtime call using the runtime calling convention; all other kinds get a
// register-based summary, with a slow path (and, for kBssEntry, an extra
// temp and custom caller-saves) when resolution or read barriers may be
// needed.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // The referrer's class is loaded from the current method (input 0).
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
5970
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits the code that materializes a java.lang.Class reference into the output
// register, using the strategy selected earlier (the instruction's LoadKind).
// Depending on the kind this is a PC-relative address computation, a literal
// load, or a GC-root field load, optionally followed by a slow path for
// resolution (BSS entry) and/or class initialization.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Fully generic path: resolve the class through the runtime.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  // Only the kinds below actually take the current ArtMethod* as input 0.
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes are never moved by the GC, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // PC-relative address of the class, fixed up by the linker: a high/low
      // patch pair whose immediates are placeholders until patching.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The class object's address is known now; embed it as a literal.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBootImageClassTable: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the class from the boot-image ClassTable slot (PC-relative).
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      // Extract the reference from the slot data, i.e. clear the hash bits.
      int32_t masked_hash = ClassTable::TableSlot::MaskHash(
          ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
      if (masked_hash != 0) {
        __ Daddiu(out, out, -masked_hash);
      }
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the GC root from the .bss entry; a null value means the class is
      // not yet resolved and triggers the slow path (see below).
      bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      // With non-Baker read barriers the temp was not requested (see the
      // locations builder), so reuse `out` for the address computation.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      GpuRegister temp = non_baker_read_barrier
          ? out
          : locations->GetTemp(0).AsRegister<GpuRegister>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, temp);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      // JIT: load the class from the JIT-root table literal.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:  // Handled by the early return above.
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  // Slow path for unresolved BSS entries and/or pending class initialization.
  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6090
David Brazdilcb1c0552015-08-04 16:22:25 +01006091static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006092 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01006093}
6094
Alexey Frunze4dda3372015-06-01 18:31:49 -07006095void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6096 LocationSummary* locations =
6097 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
6098 locations->SetOut(Location::RequiresRegister());
6099}
6100
6101void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6102 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006103 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6104}
6105
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  // No inputs and no output; only a LocationSummary shell is required.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
6109
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store zero (null) into the thread-local pending-exception slot.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
6113
Alexey Frunze4dda3372015-06-01 18:31:49 -07006114void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006115 HLoadString::LoadKind load_kind = load->GetLoadKind();
6116 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00006117 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006118 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006119 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006120 locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Alexey Frunzef63f5692016-12-13 17:43:11 -08006121 } else {
6122 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006123 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6124 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6125 // Rely on the pResolveString and marking to save everything we need.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006126 // Request a temp to hold the BSS entry location for the slow path.
6127 locations->AddTemp(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006128 RegisterSet caller_saves = RegisterSet::Empty();
6129 InvokeRuntimeCallingConvention calling_convention;
6130 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6131 locations->SetCustomSlowPathCallerSaves(caller_saves);
6132 } else {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006133 // For non-Baker read barriers we have a temp-clobbering call.
Alexey Frunzec61c0762017-04-10 13:54:23 -07006134 }
6135 }
Alexey Frunzef63f5692016-12-13 17:43:11 -08006136 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006137}
6138
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits the code that materializes a java.lang.String reference into the
// output register, following the strategy selected as the load kind:
// PC-relative address (boot image), literal (known address / JIT table),
// intern-table slot load, or .bss entry with a resolution slow path.
void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // PC-relative address fixed up by the linker: high/low patch pair whose
      // immediates are placeholders until patching.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      return;
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      // The string object's address is known now; embed it as a literal.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;
    }
    case HLoadString::LoadKind::kBootImageInternTable: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the string from its boot-image intern-table slot (PC-relative).
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Load the GC root from the .bss entry; a null result means the string
      // is unresolved and the slow path (pResolveString) is taken.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      // With non-Baker read barriers no temp was requested (see the locations
      // builder), so reuse `out` for the address computation.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      GpuRegister temp = non_baker_read_barrier
          ? out
          : locations->GetTemp(0).AsRegister<GpuRegister>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, temp);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption,
                              &info_low->label);
      SlowPathCodeMIPS64* slow_path =
          new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load, info_high);
      codegen_->AddSlowPath(slow_path);
      __ Beqzc(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress:
      // JIT: load the string from the JIT-root table literal.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
                                                           load->GetStringIndex(),
                                                           load->GetString()));
      GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
      return;
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Fallback: resolve the string through the runtime.
  DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6221
Alexey Frunze4dda3372015-06-01 18:31:49 -07006222void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
6223 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6224 locations->SetOut(Location::ConstantLocation(constant));
6225}
6226
// No code emitted here: long constants are materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6230
6231void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
6232 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006233 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006234 InvokeRuntimeCallingConvention calling_convention;
6235 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6236}
6237
6238void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006239 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006240 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006241 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006242 if (instruction->IsEnter()) {
6243 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6244 } else {
6245 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6246 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006247}
6248
6249void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6250 LocationSummary* locations =
6251 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
6252 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006253 case DataType::Type::kInt32:
6254 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006255 locations->SetInAt(0, Location::RequiresRegister());
6256 locations->SetInAt(1, Location::RequiresRegister());
6257 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6258 break;
6259
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006260 case DataType::Type::kFloat32:
6261 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006262 locations->SetInAt(0, Location::RequiresFpuRegister());
6263 locations->SetInAt(1, Location::RequiresFpuRegister());
6264 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6265 break;
6266
6267 default:
6268 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6269 }
6270}
6271
6272void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006273 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006274 LocationSummary* locations = instruction->GetLocations();
6275
6276 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006277 case DataType::Type::kInt32:
6278 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006279 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6280 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6281 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006282 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006283 __ MulR6(dst, lhs, rhs);
6284 else
6285 __ Dmul(dst, lhs, rhs);
6286 break;
6287 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006288 case DataType::Type::kFloat32:
6289 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006290 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6291 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6292 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006293 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006294 __ MulS(dst, lhs, rhs);
6295 else
6296 __ MulD(dst, lhs, rhs);
6297 break;
6298 }
6299 default:
6300 LOG(FATAL) << "Unexpected mul type " << type;
6301 }
6302}
6303
6304void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6305 LocationSummary* locations =
6306 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
6307 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006308 case DataType::Type::kInt32:
6309 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006310 locations->SetInAt(0, Location::RequiresRegister());
6311 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6312 break;
6313
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006314 case DataType::Type::kFloat32:
6315 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006316 locations->SetInAt(0, Location::RequiresFpuRegister());
6317 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6318 break;
6319
6320 default:
6321 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6322 }
6323}
6324
6325void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006326 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006327 LocationSummary* locations = instruction->GetLocations();
6328
6329 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006330 case DataType::Type::kInt32:
6331 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006332 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6333 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006334 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006335 __ Subu(dst, ZERO, src);
6336 else
6337 __ Dsubu(dst, ZERO, src);
6338 break;
6339 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006340 case DataType::Type::kFloat32:
6341 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006342 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6343 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006344 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006345 __ NegS(dst, src);
6346 else
6347 __ NegD(dst, src);
6348 break;
6349 }
6350 default:
6351 LOG(FATAL) << "Unexpected neg type " << type;
6352 }
6353}
6354
6355void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
6356 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006357 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006358 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006359 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006360 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6361 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006362}
6363
6364void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006365 // Note: if heap poisoning is enabled, the entry point takes care
6366 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006367 QuickEntrypointEnum entrypoint =
6368 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6369 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006370 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006371 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006372}
6373
6374void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
6375 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006376 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006377 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00006378 if (instruction->IsStringAlloc()) {
6379 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
6380 } else {
6381 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00006382 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006383 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006384}
6385
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // Load the NewEmptyString ArtMethod* from the thread, then its entrypoint,
    // and call it directly instead of going through InvokeRuntime.
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();  // Fill the branch delay slot.
    // Manual call above, so record the PC info for stack maps explicitly.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
6404
6405void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
6406 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
6407 locations->SetInAt(0, Location::RequiresRegister());
6408 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6409}
6410
6411void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006412 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006413 LocationSummary* locations = instruction->GetLocations();
6414
6415 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006416 case DataType::Type::kInt32:
6417 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006418 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6419 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6420 __ Nor(dst, src, ZERO);
6421 break;
6422 }
6423
6424 default:
6425 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6426 }
6427}
6428
6429void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6430 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
6431 locations->SetInAt(0, Location::RequiresRegister());
6432 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6433}
6434
6435void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6436 LocationSummary* locations = instruction->GetLocations();
6437 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6438 locations->InAt(0).AsRegister<GpuRegister>(),
6439 1);
6440}
6441
6442void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006443 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6444 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006445}
6446
Calin Juravle2ae48182016-03-16 14:05:09 +00006447void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
6448 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006449 return;
6450 }
6451 Location obj = instruction->GetLocations()->InAt(0);
6452
6453 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00006454 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006455}
6456
Calin Juravle2ae48182016-03-16 14:05:09 +00006457void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006458 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006459 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006460
6461 Location obj = instruction->GetLocations()->InAt(0);
6462
6463 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
6464}
6465
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Delegates to the codegen's common null-check driver (implicit or explicit,
  // see GenerateImplicitNullCheck/GenerateExplicitNullCheck above).
  codegen_->GenerateNullCheck(instruction);
}
6469
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  // Delegates to the shared binary-op location logic.
  HandleBinaryOp(instruction);
}
6473
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  // Delegates to the shared binary-op code generation.
  HandleBinaryOp(instruction);
}
6477
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves have no locations to build; reaching this visitor is a bug.
  LOG(FATAL) << "Unreachable";
}
6481
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  // Let the move resolver emit the native code for this set of moves.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6485
6486void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
6487 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
6488 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6489 if (location.IsStackSlot()) {
6490 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6491 } else if (location.IsDoubleStackSlot()) {
6492 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6493 }
6494 locations->SetOut(location);
6495}
6496
// No code emitted: the parameter was placed by the caller / frame setup.
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
6501
6502void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
6503 LocationSummary* locations =
6504 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6505 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
6506}
6507
// No code emitted: the method pointer already sits in its fixed register.
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
6512
6513void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
6514 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01006515 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006516 locations->SetInAt(i, Location::Any());
6517 }
6518 locations->SetOut(Location::Any());
6519}
6520
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis generate no code of their own; reaching this visitor is a bug.
  LOG(FATAL) << "Unreachable";
}
6524
6525void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006526 DataType::Type type = rem->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006527 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006528 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6529 : LocationSummary::kNoCall;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006530 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
6531
6532 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006533 case DataType::Type::kInt32:
6534 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006535 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006536 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006537 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6538 break;
6539
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006540 case DataType::Type::kFloat32:
6541 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006542 InvokeRuntimeCallingConvention calling_convention;
6543 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6544 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6545 locations->SetOut(calling_convention.GetReturnLocation(type));
6546 break;
6547 }
6548
6549 default:
6550 LOG(FATAL) << "Unexpected rem type " << type;
6551 }
6552}
6553
6554void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006555 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006556
6557 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006558 case DataType::Type::kInt32:
6559 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07006560 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006561 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006562
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006563 case DataType::Type::kFloat32:
6564 case DataType::Type::kFloat64: {
6565 QuickEntrypointEnum entrypoint =
6566 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescufc734082016-07-19 17:18:07 +01006567 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006568 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00006569 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6570 } else {
6571 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6572 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006573 break;
6574 }
6575 default:
6576 LOG(FATAL) << "Unexpected rem type " << type;
6577 }
6578}
6579
Igor Murashkind01745e2017-04-05 16:40:31 -07006580void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
6581 constructor_fence->SetLocations(nullptr);
6582}
6583
6584void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
6585 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
6586 GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
6587}
6588
Alexey Frunze4dda3372015-06-01 18:31:49 -07006589void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
6590 memory_barrier->SetLocations(nullptr);
6591}
6592
6593void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
6594 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
6595}
6596
6597void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
6598 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006599 DataType::Type return_type = ret->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006600 locations->SetInAt(0, Mips64ReturnLocation(return_type));
6601}
6602
6603void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
6604 codegen_->GenerateFrameExit();
6605}
6606
6607void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
6608 ret->SetLocations(nullptr);
6609}
6610
6611void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
6612 codegen_->GenerateFrameExit();
6613}
6614
Alexey Frunze92d90602015-12-18 18:16:36 -08006615void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
6616 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00006617}
6618
Alexey Frunze92d90602015-12-18 18:16:36 -08006619void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
6620 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00006621}
6622
Alexey Frunze4dda3372015-06-01 18:31:49 -07006623void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
6624 HandleShift(shl);
6625}
6626
6627void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
6628 HandleShift(shl);
6629}
6630
6631void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
6632 HandleShift(shr);
6633}
6634
6635void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
6636 HandleShift(shr);
6637}
6638
Alexey Frunze4dda3372015-06-01 18:31:49 -07006639void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
6640 HandleBinaryOp(instruction);
6641}
6642
6643void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
6644 HandleBinaryOp(instruction);
6645}
6646
6647void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
6648 HandleFieldGet(instruction, instruction->GetFieldInfo());
6649}
6650
6651void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
6652 HandleFieldGet(instruction, instruction->GetFieldInfo());
6653}
6654
6655void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
6656 HandleFieldSet(instruction, instruction->GetFieldInfo());
6657}
6658
6659void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01006660 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006661}
6662
Calin Juravlee460d1d2015-09-29 04:52:17 +01006663void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
6664 HUnresolvedInstanceFieldGet* instruction) {
6665 FieldAccessCallingConventionMIPS64 calling_convention;
6666 codegen_->CreateUnresolvedFieldLocationSummary(
6667 instruction, instruction->GetFieldType(), calling_convention);
6668}
6669
6670void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
6671 HUnresolvedInstanceFieldGet* instruction) {
6672 FieldAccessCallingConventionMIPS64 calling_convention;
6673 codegen_->GenerateUnresolvedFieldAccess(instruction,
6674 instruction->GetFieldType(),
6675 instruction->GetFieldIndex(),
6676 instruction->GetDexPc(),
6677 calling_convention);
6678}
6679
6680void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
6681 HUnresolvedInstanceFieldSet* instruction) {
6682 FieldAccessCallingConventionMIPS64 calling_convention;
6683 codegen_->CreateUnresolvedFieldLocationSummary(
6684 instruction, instruction->GetFieldType(), calling_convention);
6685}
6686
6687void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
6688 HUnresolvedInstanceFieldSet* instruction) {
6689 FieldAccessCallingConventionMIPS64 calling_convention;
6690 codegen_->GenerateUnresolvedFieldAccess(instruction,
6691 instruction->GetFieldType(),
6692 instruction->GetFieldIndex(),
6693 instruction->GetDexPc(),
6694 calling_convention);
6695}
6696
6697void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
6698 HUnresolvedStaticFieldGet* instruction) {
6699 FieldAccessCallingConventionMIPS64 calling_convention;
6700 codegen_->CreateUnresolvedFieldLocationSummary(
6701 instruction, instruction->GetFieldType(), calling_convention);
6702}
6703
6704void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
6705 HUnresolvedStaticFieldGet* instruction) {
6706 FieldAccessCallingConventionMIPS64 calling_convention;
6707 codegen_->GenerateUnresolvedFieldAccess(instruction,
6708 instruction->GetFieldType(),
6709 instruction->GetFieldIndex(),
6710 instruction->GetDexPc(),
6711 calling_convention);
6712}
6713
6714void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
6715 HUnresolvedStaticFieldSet* instruction) {
6716 FieldAccessCallingConventionMIPS64 calling_convention;
6717 codegen_->CreateUnresolvedFieldLocationSummary(
6718 instruction, instruction->GetFieldType(), calling_convention);
6719}
6720
6721void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
6722 HUnresolvedStaticFieldSet* instruction) {
6723 FieldAccessCallingConventionMIPS64 calling_convention;
6724 codegen_->GenerateUnresolvedFieldAccess(instruction,
6725 instruction->GetFieldType(),
6726 instruction->GetFieldIndex(),
6727 instruction->GetDexPc(),
6728 calling_convention);
6729}
6730
Alexey Frunze4dda3372015-06-01 18:31:49 -07006731void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01006732 LocationSummary* locations =
6733 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02006734 // In suspend check slow path, usually there are no caller-save registers at all.
6735 // If SIMD instructions are present, however, we force spilling all live SIMD
6736 // registers in full width (since the runtime only saves/restores lower part).
6737 locations->SetCustomSlowPathCallerSaves(
6738 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006739}
6740
6741void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
6742 HBasicBlock* block = instruction->GetBlock();
6743 if (block->GetLoopInformation() != nullptr) {
6744 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6745 // The back edge will generate the suspend check.
6746 return;
6747 }
6748 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6749 // The goto will generate the suspend check.
6750 return;
6751 }
6752 GenerateSuspendCheck(instruction, nullptr);
6753}
6754
Alexey Frunze4dda3372015-06-01 18:31:49 -07006755void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
6756 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006757 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006758 InvokeRuntimeCallingConvention calling_convention;
6759 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6760}
6761
6762void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006763 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006764 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
6765}
6766
6767void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006768 DataType::Type input_type = conversion->GetInputType();
6769 DataType::Type result_type = conversion->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006770 DCHECK_NE(input_type, result_type);
6771
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006772 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
6773 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006774 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6775 }
6776
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006777 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
6778
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006779 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006780 locations->SetInAt(0, Location::RequiresFpuRegister());
6781 } else {
6782 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006783 }
6784
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006785 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006786 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006787 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006788 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006789 }
6790}
6791
// Emits code for all primitive type conversions:
//  - integral -> integral: zero-mask (Andi) or sign-extend (Seb/Seh/Sll);
//  - integral -> FP: move the GPR value into the FPU temp, then convert;
//  - FP -> integral: truncate into the FPU temp, then move to the GPR;
//  - FP -> FP: convert between single and double precision.
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint16:
        // Zero-extend to 16 bits (Java char).
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == DataType::Type::kInt64) {
      // 64-bit move into the FPU scratch register, then convert from long.
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      // 32-bit move into the FPU scratch register, then convert from word.
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    // Only int and long results are supported for FP-to-integral conversions.
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == DataType::Type::kInt64) {
      // Truncate toward zero into the FPU scratch, then move the 64-bit result.
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      // Truncate toward zero into the FPU scratch, then move the 32-bit result.
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
6895
6896void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
6897 HandleShift(ushr);
6898}
6899
6900void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
6901 HandleShift(ushr);
6902}
6903
6904void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
6905 HandleBinaryOp(instruction);
6906}
6907
6908void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
6909 HandleBinaryOp(instruction);
6910}
6911
6912void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
6913 // Nothing to do, this should be removed during prepare for register allocator.
6914 LOG(FATAL) << "Unreachable";
6915}
6916
6917void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
6918 // Nothing to do, this should be removed during prepare for register allocator.
6919 LOG(FATAL) << "Unreachable";
6920}
6921
6922void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006923 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006924}
6925
6926void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006927 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006928}
6929
6930void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006931 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006932}
6933
6934void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006935 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006936}
6937
6938void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006939 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006940}
6941
6942void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006943 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006944}
6945
6946void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006947 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006948}
6949
6950void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006951 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006952}
6953
6954void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006955 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006956}
6957
6958void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006959 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006960}
6961
6962void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006963 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006964}
6965
6966void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006967 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006968}
6969
Aart Bike9f37602015-10-09 11:15:55 -07006970void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006971 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006972}
6973
6974void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006975 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006976}
6977
6978void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006979 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006980}
6981
6982void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006983 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006984}
6985
6986void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006987 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006988}
6989
6990void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006991 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006992}
6993
6994void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006995 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07006996}
6997
6998void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00006999 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07007000}
7001
Mark Mendellfe57faa2015-09-18 09:26:15 -04007002// Simple implementation of packed switch - generate cascaded compare/jumps.
7003void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7004 LocationSummary* locations =
7005 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
7006 locations->SetInAt(0, Location::RequiresRegister());
7007}
7008
// Emits a packed switch as a chain of compare-and-branch pairs. The switch
// value (in `value_reg`) is biased by -lower_bound into TMP and then kept
// decremented as successive case values are tested two at a time, so each
// pair of cases costs one subtraction plus two compact branches.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle the remaining cases two at a time: after subtracting 2, a negative
  // temp_reg means the value fell on the first of the pair, zero on the second.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7044
// Emits a packed switch through a jump table: a bounds check against
// num_entries, then an indexed load of a 32-bit entry that holds the target's
// offset relative to the table start, which is added to the table address to
// form the branch target.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // Scale the index to the byte offset of its 4-byte table entry.
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();
}
7075
7076void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7077 int32_t lower_bound = switch_instr->GetStartValue();
7078 uint32_t num_entries = switch_instr->GetNumEntries();
7079 LocationSummary* locations = switch_instr->GetLocations();
7080 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7081 HBasicBlock* switch_block = switch_instr->GetBlock();
7082 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7083
7084 if (num_entries > kPackedSwitchJumpTableThreshold) {
7085 GenTableBasedPackedSwitch(value_reg,
7086 lower_bound,
7087 num_entries,
7088 switch_block,
7089 default_block);
7090 } else {
7091 GenPackedSwitchWithCompares(value_reg,
7092 lower_bound,
7093 num_entries,
7094 switch_block,
7095 default_block);
7096 }
7097}
7098
Chris Larsenc9905a62017-03-13 17:06:18 -07007099void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7100 LocationSummary* locations =
7101 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
7102 locations->SetInAt(0, Location::RequiresRegister());
7103 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007104}
7105
// Loads a method pointer from either the class's embedded vtable or its
// interface method table (IMT), depending on the instruction's table kind.
// In0 holds the class pointer; Out receives the 64-bit method pointer.
void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded in the class object, so a single load at a
    // statically known offset suffices.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMips64PointerSize).SizeValue();
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      method_offset);
  } else {
    // The IMT is reached through a pointer stored in the class, so two
    // dependent loads are needed: class -> IMT, then IMT -> method entry.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMips64PointerSize));
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->Out().AsRegister<GpuRegister>(),
                      method_offset);
  }
}
7128
Alexey Frunze4dda3372015-06-01 18:31:49 -07007129} // namespace mips64
7130} // namespace art