blob: 0a6d9159d1259f20034422b70ecb244f5045c613 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070028#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "mirror/array-inl.h"
32#include "mirror/class-inl.h"
33#include "offsets.h"
Vladimir Marko174b2e22017-10-12 13:34:49 +010034#include "stack_map_stream.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070035#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070036#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070037#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070038#include "utils/stack_checks.h"
39
40namespace art {
41namespace mips64 {
42
// The current ArtMethod* is spilled at the lowest address of the frame (SP + 0).
static constexpr int kCurrentMethodStackOffset = 0;
// On entry, the calling convention passes the ArtMethod* in A0.
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
50
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010051Location Mips64ReturnLocation(DataType::Type return_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070052 switch (return_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010053 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +010054 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010055 case DataType::Type::kInt8:
56 case DataType::Type::kUint16:
57 case DataType::Type::kInt16:
58 case DataType::Type::kInt32:
59 case DataType::Type::kReference:
60 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070061 return Location::RegisterLocation(V0);
62
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010063 case DataType::Type::kFloat32:
64 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070065 return Location::FpuRegisterLocation(F0);
66
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010067 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -070068 return Location();
69 }
70 UNREACHABLE();
71}
72
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010073Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
Alexey Frunze4dda3372015-06-01 18:31:49 -070074 return Mips64ReturnLocation(type);
75}
76
// The callee receives its own ArtMethod* in kMethodRegisterArgument (A0).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
80
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010081Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070082 Location next_location;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010083 if (type == DataType::Type::kVoid) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070084 LOG(FATAL) << "Unexpected parameter type " << type;
85 }
86
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010087 if (DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070088 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
89 next_location = Location::FpuRegisterLocation(
90 calling_convention.GetFpuRegisterAt(float_index_++));
91 gp_index_++;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010092 } else if (!DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070093 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
94 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
95 float_index_++;
96 } else {
97 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010098 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
99 : Location::StackSlot(stack_offset);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700100 }
101
102 // Space on the stack is reserved for all arguments.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100103 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700104
Alexey Frunze4dda3372015-06-01 18:31:49 -0700105 return next_location;
106}
107
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100108Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700109 return Mips64ReturnLocation(type);
110}
111
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100112// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
113#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700114#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700115
// Slow path for HBoundsCheck: moves index and length into the runtime's
// argument registers and calls the ThrowArrayBounds (or ThrowStringBounds
// for String.charAt) entrypoint. The throw never returns, so this path is
// fatal unless the exception is caught in this method.
class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    // String.charAt uses a dedicated entrypoint so the exception message
    // mentions a string index rather than an array index.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};
152
153class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
154 public:
Alexey Frunzec61c0762017-04-10 13:54:23 -0700155 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
156 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700157
158 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
159 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
160 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100161 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700162 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
163 }
164
Alexandre Rames8158f282015-08-07 10:26:17 +0100165 bool IsFatal() const OVERRIDE { return true; }
166
Roland Levillain46648892015-06-19 16:07:18 +0100167 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
168
Alexey Frunze4dda3372015-06-01 18:31:49 -0700169 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700170 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
171};
172
// Slow path for HLoadClass/HClinitCheck: calls the runtime to resolve the
// type (and optionally run its class initializer), then, for the kBssEntry
// load kind, stores the resolved class back into the .bss entry so future
// fast-path loads succeed.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `at` is the instruction this slow path belongs to (the HLoadClass itself,
  // or an HClinitCheck); `bss_info_high` is the high-half patch info for the
  // kBssEntry load kind, null otherwise.
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit,
                          const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<GpuRegister>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the type index as the single argument to the runtime.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      __ Bind(&info_low->label);
      // The 0x5678 offset is a placeholder patched at link time.
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out.AsRegister<GpuRegister>(), TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
279
// Slow path for HLoadString with the kBssEntry load kind: calls the runtime
// to resolve the string, then stores the result back into the .bss entry so
// future fast-path loads succeed.
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `bss_info_high` is the high-half patch info for the string's .bss entry.
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction,
                                    const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS64(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    GpuRegister out = locations->Out().AsRegister<GpuRegister>();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    GpuRegister entry_address = kNoGpuRegister;
    if (baker_or_no_read_barriers) {
      GpuRegister temp = locations->GetTemp(0).AsRegister<GpuRegister>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the string index as the single argument to the runtime.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(),
                                                 string_index,
                                                 bss_info_high_);
      __ Bind(&info_low->label);
      // The 0x5678 offset is a placeholder patched at link time.
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
    }

    // Move the resolved string (returned in A0) to the desired location.
    DataType::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};
364
365class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
366 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000367 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700368
369 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
370 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
371 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000372 if (instruction_->CanThrowIntoCatchBlock()) {
373 // Live registers will be restored in the catch block if caught.
374 SaveLiveRegisters(codegen, instruction_->GetLocations());
375 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100376 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700377 instruction_,
378 instruction_->GetDexPc(),
379 this);
380 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
381 }
382
Alexandre Rames8158f282015-08-07 10:26:17 +0100383 bool IsFatal() const OVERRIDE { return true; }
384
Roland Levillain46648892015-06-19 16:07:18 +0100385 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
386
Alexey Frunze4dda3372015-06-01 18:31:49 -0700387 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700388 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
389};
390
// Slow path for HSuspendCheck: calls the TestSuspend runtime entrypoint so
// the thread can be suspended (e.g. for GC), then branches back either to
// the given successor block or to `return_label_` at the check site.
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      // Return to the point right after the suspend check.
      __ Bc(GetReturnLabel());
    } else {
      // Continue with the designated successor block.
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  // Only valid when no successor was supplied.
  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};
431
// Slow path shared by HInstanceOf and HCheckCast: moves object and class
// into the runtime's argument registers and calls the matching entrypoint.
// For instance-of the boolean result is moved to the output location; for
// check-cast the entrypoint throws on failure. When `is_fatal_` is true the
// cast cannot be caught here, so no registers are saved/restored and there
// is no fall-through exit.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Move the boolean result into the instruction's output location.
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether a failed check cannot be caught in this method (no fall-through).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
486
487class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
488 public:
Aart Bik42249c32016-01-07 15:33:50 -0800489 explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000490 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700491
492 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800493 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700494 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100495 LocationSummary* locations = instruction_->GetLocations();
496 SaveLiveRegisters(codegen, locations);
497 InvokeRuntimeCallingConvention calling_convention;
498 __ LoadConst32(calling_convention.GetRegisterAt(0),
499 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufc734082016-07-19 17:18:07 +0100500 mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100501 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700502 }
503
Roland Levillain46648892015-06-19 16:07:18 +0100504 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }
505
Alexey Frunze4dda3372015-06-01 18:31:49 -0700506 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700507 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
508};
509
Alexey Frunze15958152017-02-09 19:08:30 -0800510class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
511 public:
512 explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}
513
514 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
515 LocationSummary* locations = instruction_->GetLocations();
516 __ Bind(GetEntryLabel());
517 SaveLiveRegisters(codegen, locations);
518
519 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100520 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Alexey Frunze15958152017-02-09 19:08:30 -0800521 parallel_move.AddMove(
522 locations->InAt(0),
523 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100524 DataType::Type::kReference,
Alexey Frunze15958152017-02-09 19:08:30 -0800525 nullptr);
526 parallel_move.AddMove(
527 locations->InAt(1),
528 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100529 DataType::Type::kInt32,
Alexey Frunze15958152017-02-09 19:08:30 -0800530 nullptr);
531 parallel_move.AddMove(
532 locations->InAt(2),
533 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100534 DataType::Type::kReference,
Alexey Frunze15958152017-02-09 19:08:30 -0800535 nullptr);
536 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
537
538 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
539 mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
540 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
541 RestoreLiveRegisters(codegen, locations);
542 __ Bc(GetExitLabel());
543 }
544
545 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }
546
547 private:
548 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
549};
550
551// Slow path marking an object reference `ref` during a read
552// barrier. The field `obj.field` in the object `obj` holding this
553// reference does not get updated by this slow path after marking (see
554// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
555//
556// This means that after the execution of this slow path, `ref` will
557// always be up-to-date, but `obj.field` may not; i.e., after the
558// flip, `ref` will be a to-space reference, but `obj.field` will
559// probably still be a from-space reference (unless it gets updated by
560// another thread, or if another thread installed another object
561// reference (different from `ref`) in `obj.field`).
562//
563// If `entrypoint` is a valid location it is assumed to already be
564// holding the entrypoint. The case where the entrypoint is passed in
565// is for the GcRoot read barrier.
566class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
567 public:
568 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
569 Location ref,
570 Location entrypoint = Location::NoLocation())
571 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
572 DCHECK(kEmitCompilerReadBarrier);
573 }
574
575 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
576
577 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
578 LocationSummary* locations = instruction_->GetLocations();
579 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
580 DCHECK(locations->CanCall());
581 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
582 DCHECK(instruction_->IsInstanceFieldGet() ||
583 instruction_->IsStaticFieldGet() ||
584 instruction_->IsArrayGet() ||
585 instruction_->IsArraySet() ||
586 instruction_->IsLoadClass() ||
587 instruction_->IsLoadString() ||
588 instruction_->IsInstanceOf() ||
589 instruction_->IsCheckCast() ||
590 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
591 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
592 << "Unexpected instruction in read barrier marking slow path: "
593 << instruction_->DebugName();
594
595 __ Bind(GetEntryLabel());
596 // No need to save live registers; it's taken care of by the
597 // entrypoint. Also, there is no need to update the stack mask,
598 // as this runtime call will not trigger a garbage collection.
599 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
600 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
601 (S2 <= ref_reg && ref_reg <= S7) ||
602 (ref_reg == S8)) << ref_reg;
603 // "Compact" slow path, saving two moves.
604 //
605 // Instead of using the standard runtime calling convention (input
606 // and output in A0 and V0 respectively):
607 //
608 // A0 <- ref
609 // V0 <- ReadBarrierMark(A0)
610 // ref <- V0
611 //
612 // we just use rX (the register containing `ref`) as input and output
613 // of a dedicated entrypoint:
614 //
615 // rX <- ReadBarrierMarkRegX(rX)
616 //
617 if (entrypoint_.IsValid()) {
618 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
619 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
620 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
621 __ Nop();
622 } else {
623 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100624 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800625 // This runtime call does not require a stack map.
626 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
627 instruction_,
628 this);
629 }
630 __ Bc(GetExitLabel());
631 }
632
633 private:
634 // The location (register) of the marked object reference.
635 const Location ref_;
636
637 // The location of the entrypoint if already loaded.
638 const Location entrypoint_;
639
640 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
641};
642
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;      // Pointer to actual memory.
    GpuRegister tmp = AT;           // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    // SC writes 1 to `tmp` on success, 0 on failure; retry on failure.
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Scratch register holding the pre-marking value of `ref_` across the
  // entrypoint call; must be neither AT nor TMP (used by the CAS below).
  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};
799
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: pass the constant offset as the third argument instead.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`
  // that is neither callee-save nor blocked; used to preserve `index_reg`
  // above. Aborts if none is found (cannot happen on MIPS64).
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
985
986// Slow path generating a read barrier for a GC root.
987class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
988 public:
989 ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
990 : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
991 DCHECK(kEmitCompilerReadBarrier);
992 }
993
994 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
995 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100996 DataType::Type type = DataType::Type::kReference;
Alexey Frunze15958152017-02-09 19:08:30 -0800997 GpuRegister reg_out = out_.AsRegister<GpuRegister>();
998 DCHECK(locations->CanCall());
999 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
1000 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1001 << "Unexpected instruction in read barrier for GC root slow path: "
1002 << instruction_->DebugName();
1003
1004 __ Bind(GetEntryLabel());
1005 SaveLiveRegisters(codegen, locations);
1006
1007 InvokeRuntimeCallingConvention calling_convention;
1008 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
1009 mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
1010 root_,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001011 DataType::Type::kReference);
Alexey Frunze15958152017-02-09 19:08:30 -08001012 mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
1013 instruction_,
1014 instruction_->GetDexPc(),
1015 this);
1016 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1017 mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1018
1019 RestoreLiveRegisters(codegen, locations);
1020 __ Bc(GetExitLabel());
1021 }
1022
1023 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }
1024
1025 private:
1026 const Location out_;
1027 const Location root_;
1028
1029 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
1030};
1031
// Constructs the MIPS64 code generator: forwards the register-file shape and
// callee-save masks to the base CodeGenerator, then initializes the builders,
// the assembler, and the (initially empty) linker-patch / literal tables,
// all allocated from the graph's arena allocator.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),  // Allocated lazily, one label per basic block.
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1069
1070#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001071// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1072#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001073#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -07001074
// Finalizes code generation: lets the assembler resolve/relax branches, then
// rewrites all recorded native PCs (stack maps and disassembly intervals)
// to account for any code-size changes, before delegating to the base class.
void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  StackMapStream* stack_map_stream = GetStackMapStream();
  for (size_t i = 0, num = stack_map_stream->GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream->GetStackMap(i).native_pc_code_offset.Uint32Value(InstructionSet::kMips64);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    // Branch fix-ups can only grow the code, never shrink it.
    DCHECK_GE(new_position, old_position);
    stack_map_stream->SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}
1106
// The parallel move resolver emits code through the code generator's assembler.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1110
1111void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001112 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001113 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1114}
1115
1116void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001117 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001118 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
1119}
1120
// Pops the scratch register `reg` off the stack: reload it from the slot at
// SP, then release the doubleword pushed by SpillScratch.
void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}
1126
// Pushes the scratch register `reg` onto the stack: reserve one doubleword,
// then store the register into the new slot at SP.
void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}
1132
1133void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
1134 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
1135 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
1136 // Allocate a scratch register other than TMP, if available.
1137 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1138 // automatically unspilled when the scratch scope object is destroyed).
1139 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1140 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +02001141 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001142 __ LoadFromOffset(load_type,
1143 GpuRegister(ensure_scratch.GetRegister()),
1144 SP,
1145 index1 + stack_offset);
1146 __ LoadFromOffset(load_type,
1147 TMP,
1148 SP,
1149 index2 + stack_offset);
1150 __ StoreToOffset(store_type,
1151 GpuRegister(ensure_scratch.GetRegister()),
1152 SP,
1153 index2 + stack_offset);
1154 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
1155}
1156
// Maps a core (GPU) register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}
1160
// Maps a floating-point register to its DWARF register number for CFI emission.
static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001164
// Emits the method prologue: optional stack-overflow probe, frame allocation,
// callee-save spills (with matching CFI records), storing of the current
// ArtMethod, and initialization of the should-deoptimize flag when present.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe below SP by the reserved amount: the load (into ZERO, value
    // discarded) faults if the stack would overflow, and the recorded PC
    // lets the runtime attribute the fault to this method entry.
    __ LoadFromOffset(
        kLoadWord,
        ZERO,
        SP,
        -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(InstructionSet::kMips64)) {
    LOG(FATAL) << "Stack frame larger than "
        << GetStackOverflowReservedBytes(InstructionSet::kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core callee-saves go at the top of the frame, highest-numbered first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow immediately below the core ones.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1225
// Emits the method epilogue: restores callee-saved registers (with matching
// CFI records), releases the frame, and returns through RA. The CFI state is
// remembered/restored so fall-through code after this exit keeps correct
// unwind info.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return: jump to the address held in RA.
  __ Jic(RA, 0);

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1260
// Binds the assembler label associated with `block` at the current code
// position, so branches targeting the block can be resolved.
void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1264
// Emits code moving a value from `source` to `destination`, covering all
// combinations of GPR/FPR/SIMD register, (double/SIMD) stack slot and
// constant. `dst_type` selects 32- vs 64-bit move width; kVoid would mean
// "unspecified" but is rejected by the DCHECK below, so the
// `unspecified_type` branches are effectively dead defensive code.
void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       DataType::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == DataType::Type::kVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
      }
    }
    DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack.
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (DataType::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsSIMDStackSlot()) {
      // 128-bit vector spill slot -> FPU (MSA) register.
      __ LoadFpuFromOffset(kLoadQuadword,
                           destination.AsFpuRegister<FpuRegister>(),
                           SP,
                           source.GetStackIndex());
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant. For an FPR destination the constant is
      // first materialized in scratch GPR AT (or ZERO when the bit pattern is
      // zero), then transferred with mtc1/dmtc1.
      GpuRegister gpr = AT;
      if (!DataType::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == DataType::Type::kFloat32) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == DataType::Type::kFloat64) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR.
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          // Vector-capable graphs must copy the full 128-bit register.
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          // Move to FPR from FPR.
          if (dst_type == DataType::Type::kFloat32) {
            __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          } else {
            DCHECK_EQ(dst_type, DataType::Type::kFloat64);
            __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          }
        }
      } else {
        DCHECK(destination.IsRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ StoreFpuToOffset(kStoreQuadword,
                          source.AsFpuRegister<FpuRegister>(),
                          SP,
                          destination.GetStackIndex());
    } else {
      // SIMD slot to SIMD slot via the FPU scratch register FTMP.
      DCHECK(source.IsSIMDStackSlot());
      __ LoadFpuFromOffset(kLoadQuadword,
                           FTMP,
                           SP,
                           source.GetStackIndex());
      __ StoreFpuToOffset(kStoreQuadword,
                          FTMP,
                          SP,
                          destination.GetStackIndex());
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
        } else {
          dst_type =
              destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR.
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant. Zero constants are stored directly from
      // the ZERO register; anything else goes through scratch register TMP.
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack, bouncing through scratch register TMP.
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}
1455
// Emits code exchanging the values held in `loc1` and `loc2` (used by the
// parallel move resolver). Constants cannot be swapped. Scratch registers:
// TMP for GPR/memory paths, FTMP for FPR-FPR swaps.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs.
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs; MovS/MovD according to the value width.
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == DataType::Type::kFloat32) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, DataType::Type::kFloat64);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot: load slot into TMP, store the register
    // into the slot, then move TMP into the register.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack-to-stack swaps are delegated to the move resolver's exchange.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1521
Calin Juravle175dc732015-08-25 15:42:32 +01001522void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1523 DCHECK(location.IsRegister());
1524 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1525}
1526
Calin Juravlee460d1d2015-09-29 04:52:17 +01001527void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1528 if (location.IsRegister()) {
1529 locations->AddTemp(location);
1530 } else {
1531 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1532 }
1533}
1534
// Emits the GC card-marking write barrier for a reference store into
// `object`. When `value_can_be_null`, a null store skips the marking.
// Scratch registers AT (card table biased base) and TMP are clobbered.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  // Load the biased card table base from the thread, compute the card address
  // for `object`, and store the base's low byte as the "dirty" marker
  // (presumably the runtime's card-dirtying convention — the base doubles as
  // the dirty value; see gc::accounting::CardTable).
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1555
// Converts the recorded PC-relative patch infos of one kind into
// linker::LinkerPatch entries, using `Factory` (a LinkerPatch creator such as
// LinkerPatch::RelativeTypePatch). For a low-half patch, the PC base is taken
// from the paired high-half patch's label; a high-half patch is its own base.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // The high patch (auipc) provides the PC-relative base for both halves.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1570
// Collects all recorded linker patches into `linker_patches`. Boot-image
// compilations emit relative method/type/string patches; other compilations
// emit class-table/intern-table patches instead, and never have direct
// method patches. The .bss entry patches are emitted for both modes.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        pc_relative_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        pc_relative_string_patches_, linker_patches);
  } else {
    DCHECK(pc_relative_method_patches_.empty());
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
        pc_relative_string_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  // The reserve() above must have been exact.
  DCHECK_EQ(size, linker_patches->size());
}
1603
Vladimir Marko65979462017-05-19 17:25:12 +01001604CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001605 MethodReference target_method,
1606 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001607 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001608 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001609 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001610 &pc_relative_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001611}
1612
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001613CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001614 MethodReference target_method,
1615 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001616 return NewPcRelativePatch(*target_method.dex_file,
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001617 target_method.index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001618 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001619 &method_bss_entry_patches_);
1620}
1621
Alexey Frunzef63f5692016-12-13 17:43:11 -08001622CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001623 const DexFile& dex_file,
1624 dex::TypeIndex type_index,
1625 const PcRelativePatchInfo* info_high) {
1626 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001627}
1628
Vladimir Marko1998cd02017-01-13 13:02:58 +00001629CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001630 const DexFile& dex_file,
1631 dex::TypeIndex type_index,
1632 const PcRelativePatchInfo* info_high) {
1633 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001634}
1635
Vladimir Marko65979462017-05-19 17:25:12 +01001636CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001637 const DexFile& dex_file,
1638 dex::StringIndex string_index,
1639 const PcRelativePatchInfo* info_high) {
1640 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001641}
1642
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001643CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
1644 const DexFile& dex_file,
1645 dex::StringIndex string_index,
1646 const PcRelativePatchInfo* info_high) {
1647 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
1648}
1649
// Appends a new PcRelativePatchInfo to `patches` and returns its address.
// The returned pointer stays valid because ArenaDeque does not relocate
// existing elements on emplace_back (high patches are later referenced by
// their paired low patches via this pointer).
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file,
    uint32_t offset_or_index,
    const PcRelativePatchInfo* info_high,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index, info_high);
  return &patches->back();
}
1658
// Returns the literal-pool entry for `value`, creating it on first use so
// identical 32-bit constants share one pool slot.
Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}
1664
// Returns the literal-pool entry for `value`, creating it on first use so
// identical 64-bit constants share one pool slot.
Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint64_t>(value); });
}
1670
Alexey Frunzef63f5692016-12-13 17:43:11 -08001671Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001672 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001673}
1674
// Emits the AUIPC that materializes the high half of a PC-relative address
// into `out`, binding `info_high`'s label at the auipc and, when given,
// `info_low`'s label at the position of the very next instruction (which the
// caller emits and which consumes the low half).
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  // `info_high` must itself be a high patch, not a low one.
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1689
// Reserves a JIT root-table slot for `handle` and returns a deduplicated
// placeholder literal for the string root; the placeholder is rewritten with
// the actual root address in EmitJitRootPatches().
Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                          dex::StringIndex string_index,
                                                          Handle<mirror::String> handle) {
  ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
  return jit_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1698
// Reserves a JIT root-table slot for `handle` and returns a deduplicated
// placeholder literal for the class root; the placeholder is rewritten with
// the actual root address in EmitJitRootPatches().
Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                         dex::TypeIndex type_index,
                                                         Handle<mirror::Class> handle) {
  ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1707
// Patches one JIT root literal in the emitted `code`: writes the address of
// the root-table entry (`roots_data` + slot index) over the 32-bit
// placeholder at the literal's offset. The cast DCHECKs that the table
// address fits in 32 bits.
void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const Literal* literal,
                                          uint64_t index_in_table) const {
  uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
}
1717
// Resolves every JIT string/class root placeholder emitted by the
// Deduplicate* helpers, pointing each literal at its slot in the JIT root
// table built at `roots_data`.
void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    Literal* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    Literal* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}
1732
// Marks registers the register allocator must never hand out: ABI-reserved
// registers, assembler scratch registers, and runtime-dedicated registers.
void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls.
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1765
Alexey Frunze4dda3372015-06-01 18:31:49 -07001766size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1767 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001768 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001769}
1770
1771size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1772 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001773 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001774}
1775
1776size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001777 __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
1778 FpuRegister(reg_id),
1779 SP,
1780 stack_index);
1781 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001782}
1783
1784size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001785 __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
1786 FpuRegister(reg_id),
1787 SP,
1788 stack_index);
1789 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001790}
1791
// Prints the symbolic name of core register `reg` (for debug dumps).
void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}
1795
// Prints the symbolic name of FP register `reg` (for debug dumps).
void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}
1799
// Emits a call to the quick runtime entrypoint `entrypoint` and, when the
// entrypoint requires one, records a stack map at `dex_pc` for `instruction`.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1810
// Emits a runtime call through a raw thread-local entrypoint offset without
// recording a stack map (for entrypoints that never need one).
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1817
// Emits the actual runtime call: loads the entrypoint address from the Thread
// register (TR) at the given offset into T9, then jumps-and-links through T9.
// The trailing Nop fills the JALR delay slot.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1823
// Branches to slow_path unless the class in class_reg is initialized.
// The class status is loaded as a signed byte and compared (signed) against
// kStatusInitialized; any smaller status takes the slow path.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadSignedByte, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  // The slow path jumps back here after performing the initialization.
  __ Bind(slow_path->GetExitLabel());
}
1833
// Emits a memory barrier. The barrier kind is ignored: every kind is
// implemented with the same SYNC instruction, since only stype 0 is supported.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1837
// Emits a thread-suspension test. The slow path is created lazily and cached
// on the HSuspendCheck so a loop header's check reuses one slow path object.
// `successor` is non-null for checks merged into a back edge (jump to the
// successor when no suspension is requested) and null for standalone checks
// (fall through when no suspension is requested).
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      down_cast<SuspendCheckSlowPathMIPS64*>(instruction->GetSlowPath());

  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathMIPS64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // A cached slow path must have been created for this same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Any non-zero thread flag (e.g. a suspend request) triggers the slow path.
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1868
// The instruction visitor borrows the assembler owned by the code generator;
// both pointers are cached for use by the Visit* methods.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1874
1875void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1876 DCHECK_EQ(instruction->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001877 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001878 DataType::Type type = instruction->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001879 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001880 case DataType::Type::kInt32:
1881 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001882 locations->SetInAt(0, Location::RequiresRegister());
1883 HInstruction* right = instruction->InputAt(1);
1884 bool can_use_imm = false;
1885 if (right->IsConstant()) {
1886 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1887 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1888 can_use_imm = IsUint<16>(imm);
1889 } else if (instruction->IsAdd()) {
1890 can_use_imm = IsInt<16>(imm);
1891 } else {
1892 DCHECK(instruction->IsSub());
1893 can_use_imm = IsInt<16>(-imm);
1894 }
1895 }
1896 if (can_use_imm)
1897 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1898 else
1899 locations->SetInAt(1, Location::RequiresRegister());
1900 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1901 }
1902 break;
1903
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001904 case DataType::Type::kFloat32:
1905 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001906 locations->SetInAt(0, Location::RequiresFpuRegister());
1907 locations->SetInAt(1, Location::RequiresFpuRegister());
1908 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1909 break;
1910
1911 default:
1912 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1913 }
1914}
1915
// Emits code for Add/Sub/And/Or/Xor, selecting immediate-form instructions
// when the locations builder kept the RHS as a constant. Int32 uses the
// 32-bit ops (Addu/Subu/...), Int64 the doubleword ops (Daddu/Dsubu/...);
// subtraction by an immediate is emitted as addition of its negation.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      // RHS is either a constant immediate or a register, per the locations.
      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == DataType::Type::kInt32) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        // The locations builder only allows an immediate whose negation fits
        // in 16 signed bits, so -rhs_imm is valid here.
        if (type == DataType::Type::kInt32) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        // And/Or/Xor never reach here with an FP result type.
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2003
2004void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002005 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002006
Vladimir Markoca6fff82017-10-03 14:49:14 +01002007 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002008 DataType::Type type = instr->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002009 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002010 case DataType::Type::kInt32:
2011 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002012 locations->SetInAt(0, Location::RequiresRegister());
2013 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002014 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002015 break;
2016 }
2017 default:
2018 LOG(FATAL) << "Unexpected shift type " << type;
2019 }
2020}
2021
// Emits code for Shl/Shr/UShr/Ror. Constant distances are masked to the type
// width (Java shift semantics); 64-bit constant distances >= 32 use the
// dedicated "+32" instruction forms (Dsll32/Dsra32/Dsrl32/Drotr32). Variable
// distances use the register-shift forms, which mask in hardware.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Mask the distance to 0..31 (int) or 0..63 (long).
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero: just copy (or do nothing if in place already).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // Distances 32..63: the immediate field only holds 5 bits, so
            // use the "+32" doubleword shift encodings.
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2114
// Add shares its location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2118
// Add shares its code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2122
// And shares its location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2126
// And shares its code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2130
// Sets up locations for an array element load. Object-array gets with read
// barriers may call a slow path; with Baker barriers they may also need a
// marking temp, unless the corresponding read-barrier thunks are enabled.
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // A constant index is handled like a field load, so the field-thunk
    // flag (not the array-thunk flag) decides whether the temp is needed.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2167
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002168static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2169 auto null_checker = [codegen, instruction]() {
2170 codegen->MaybeRecordImplicitNullCheck(instruction);
2171 };
2172 return null_checker;
2173}
2174
// Emits an array element load. The address is formed either as a constant
// offset from the array register (constant index) or via Daddu/Dlsa scaled
// addressing (register index). Implicit null checks are recorded on the
// first faulting load via null_checker. String.charAt() with compression
// enabled dispatches at runtime between byte and halfword loads; reference
// loads go through the read-barrier machinery when enabled.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  DataType::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        // Load the count field and extract the compression flag (bit 0) into
        // TMP; the null check is recorded on this first load.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          // Runtime dispatch: byte load for compressed strings, halfword load
          // for uncompressed ones.
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case DataType::Type::kInt16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      LoadOperandType load_type =
          (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // The temp was only allocated when the corresponding thunk is off;
        // see the matching logic in LocationsBuilderMIPS64::VisitArrayGet.
        bool temp_needed = index.IsConstant()
            ? !kBakerReadBarrierThunksEnableForFields
            : !kBakerReadBarrierThunksEnableForArrays;
        Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          offset,
                                                          temp,
                                                          /* needs_null_check */ false);
        } else {
          codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          index,
                                                          temp,
                                                          /* needs_null_check */ false);
        }
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2400
2401void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002402 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002403 locations->SetInAt(0, Location::RequiresRegister());
2404 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2405}
2406
// Loads the array length field; the load doubles as the implicit null check.
// For String.length() with compression enabled, bit 0 of the count field is
// the compression flag and is shifted out.
void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
2419
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002420Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2421 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2422 ? Location::ConstantLocation(instruction->AsConstant())
2423 : Location::RequiresRegister();
2424}
2425
2426Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2427 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2428 // We can store a non-zero float or double constant without first loading it into the FPU,
2429 // but we should only prefer this if the constant has a single use.
2430 if (instruction->IsConstant() &&
2431 (instruction->AsConstant()->IsZeroBitPattern() ||
2432 instruction->GetUses().HasExactlyOneElement())) {
2433 return Location::ConstantLocation(instruction->AsConstant());
2434 // Otherwise fall through and require an FPU register for the constant.
2435 }
2436 return Location::RequiresFpuRegister();
2437}
2438
// Sets up locations for an array element store: array in a register, index as
// register-or-constant, and the value as a register or a storable constant.
// Stores that may need a runtime type check get a slow-path call summary;
// reference stores that need a write barrier get a temp register.
void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
    locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
  }
  if (needs_write_barrier) {
    // Temporary register for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
  }
}
2464
// Emits the machine code for an array-element store. With a constant index
// the store addresses off `obj` directly; with a register index the effective
// base address is materialized into TMP first. Reference stores additionally
// emit a type-check (possibly via a slow path) and a GC write barrier.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // Records an implicit null check on the first memory access, if enabled.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        // Byte elements: no scaling needed, plain add of the index.
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // Dlsa computes base_reg = (index << TIMES_2) + obj in one instruction.
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      if (value_location.IsConstant()) {
        // Just setting null.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        // Only the null reference is ever stored as a constant here.
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // A null value always passes the type check; store it directly and
          // skip the check below.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] arrays a value whose class's direct super class is
          // Object also passes; anything else goes to the slow path.
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // No slow path was emitted, so the store above is the instruction that
        // can fault on a null `obj`; record it for the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        // Constant FP values are stored as their bit pattern from a GPR.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2703
2704void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002705 RegisterSet caller_saves = RegisterSet::Empty();
2706 InvokeRuntimeCallingConvention calling_convention;
2707 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2708 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2709 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002710 locations->SetInAt(0, Location::RequiresRegister());
2711 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002712}
2713
2714void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
2715 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002716 BoundsCheckSlowPathMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01002717 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002718 codegen_->AddSlowPath(slow_path);
2719
2720 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
2721 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
2722
2723 // length is limited by the maximum positive signed 32-bit integer.
2724 // Unsigned comparison of length and index checks for index < 0
2725 // and for length <= index simultaneously.
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002726 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002727}
2728
Alexey Frunze15958152017-02-09 19:08:30 -08002729// Temp is used for read barrier.
2730static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2731 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002732 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002733 (kUseBakerReadBarrier ||
2734 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2735 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2736 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2737 return 1;
2738 }
2739 return 0;
2740}
2741
2742// Extra temp is used for read barrier.
2743static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2744 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2745}
2746
Alexey Frunze4dda3372015-06-01 18:31:49 -07002747void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002748 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2749 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
2750
2751 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
2752 switch (type_check_kind) {
2753 case TypeCheckKind::kExactCheck:
2754 case TypeCheckKind::kAbstractClassCheck:
2755 case TypeCheckKind::kClassHierarchyCheck:
2756 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08002757 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002758 ? LocationSummary::kCallOnSlowPath
2759 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
2760 break;
2761 case TypeCheckKind::kArrayCheck:
2762 case TypeCheckKind::kUnresolvedCheck:
2763 case TypeCheckKind::kInterfaceCheck:
2764 call_kind = LocationSummary::kCallOnSlowPath;
2765 break;
2766 }
2767
Vladimir Markoca6fff82017-10-03 14:49:14 +01002768 LocationSummary* locations =
2769 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002770 locations->SetInAt(0, Location::RequiresRegister());
2771 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08002772 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002773}
2774
// Emits the machine code for a check-cast: a fast inline type check chosen by
// `type_check_kind`, with a slow path (fatal or returning, see below) for the
// cases the inline code cannot prove.
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  // Field/array offsets used by the various inline checks below.
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);  // TMP = remaining iftable entries.
      // Loop through the iftable and check if any class matches.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      // Each iftable entry spans two references, hence the stride of
      // 2 * kHeapReferenceSize and the count decrement by 2.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
2961
2962void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
2963 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002964 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002965 locations->SetInAt(0, Location::RequiresRegister());
2966 if (check->HasUses()) {
2967 locations->SetOut(Location::SameAsFirstInput());
2968 }
2969}
2970
2971void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
2972 // We assume the class is not null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01002973 SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
Alexey Frunze4dda3372015-06-01 18:31:49 -07002974 check->GetLoadClass(),
2975 check,
2976 check->GetDexPc(),
2977 true);
2978 codegen_->AddSlowPath(slow_path);
2979 GenerateClassInitializationCheck(slow_path,
2980 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
2981}
2982
2983void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002984 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002985
Vladimir Markoca6fff82017-10-03 14:49:14 +01002986 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002987
2988 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002989 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002990 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002991 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002992 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002993 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002994 case DataType::Type::kInt32:
2995 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002996 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002997 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002998 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2999 break;
3000
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003001 case DataType::Type::kFloat32:
3002 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003003 locations->SetInAt(0, Location::RequiresFpuRegister());
3004 locations->SetInAt(1, Location::RequiresFpuRegister());
3005 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003006 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003007
3008 default:
3009 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3010 }
3011}
3012
// Emits code for HCompare: writes -1, 0 or +1 into the output register
// according to the three-way comparison of the two inputs.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      // A zero constant needs no materialization: the ZERO register is used
      // directly. Non-zero constants are loaded into AT.
      GpuRegister rhs = ZERO;
      if (use_imm) {
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), i.e. +1, 0 or -1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal operands (and only those) take the early exit with res == 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // gt-bias: an unordered (NaN) comparison must yield +1, so test
        // lhs < rhs and fall through to +1 when the test fails (including NaN).
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // lt-bias: NaN must yield -1, so test rhs < lhs and fall through
        // to -1 when the test fails (including NaN).
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      // Same scheme as kFloat32, using the double-precision compare ops.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3104
// Allocates locations for a condition: integral operands go in core
// registers (the RHS may stay a constant), FP operands in FPU registers.
void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  switch (instruction->InputAt(0)->GetType()) {
    // Note: `default` is deliberately grouped with kInt64 — every
    // non-floating-point type takes the core-register path.
    default:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      break;
  }
  // No output is needed when the condition is consumed directly by its user
  // (e.g. folded into a branch) rather than materialized.
  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
3124
// Materializes a condition into its output register as 0/1, dispatching on
// the operand type. Does nothing when the condition is emitted at its use
// site (the user generates the compare itself).
void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  DataType::Type type = instruction->InputAt(0)->GetType();
  LocationSummary* locations = instruction->GetLocations();
  switch (type) {
    // All non-int64, non-FP types are handled as 32-bit integer compares.
    default:
      // Integer case.
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
      return;
    case DataType::Type::kInt64:
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
      return;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
      return;
  }
}
3146
// Emits code for Div/Rem with a constant divisor of +1 or -1.
// Rem is always 0; Div is the dividend, negated when the divisor is -1.
void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    // x % +/-1 == 0.
    __ Move(out, ZERO);
  } else {
    if (imm == -1) {
      // x / -1 == -x; use the 32- or 64-bit subtract from zero accordingly.
      if (type == DataType::Type::kInt32) {
        __ Subu(out, ZERO, dividend);
      } else {
        DCHECK_EQ(type, DataType::Type::kInt64);
        __ Dsubu(out, ZERO, dividend);
      }
    } else if (out != dividend) {
      // x / 1 == x; copy only if out and dividend differ.
      __ Move(out, dividend);
    }
  }
}
3175
// Emits code for Div/Rem with a constant divisor whose absolute value is a
// power of two. Shifts replace the divide; a bias derived from the sign bit
// is added first so the result rounds toward zero for negative dividends.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin keeps INT_MIN/INT64_MIN as-is; as unsigned it is still a
  // power of two, so CTZ gives the shift amount in all cases.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? (abs_imm - 1) : 0 — the round-toward-zero bias.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        // Dsrl32 can only encode shifts of 32..63; pick the right form.
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // out = ((dividend + bias) & (abs_imm - 1)) - bias.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask doesn't fit Andi's 16-bit immediate; mask via shift pair.
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3279
// Emits code for Div/Rem with an arbitrary constant divisor using
// multiplication by a precomputed "magic" reciprocal plus shift/correction
// steps, avoiding a hardware divide.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    // TMP = high 32 bits of dividend * magic.
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correct for the sign mismatch between divisor and magic constant.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // quotient = TMP - (TMP >> 31): add one for negative results.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // remainder = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // 64-bit variant of the same sequence.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    // Dsra encodes shifts 0..31 only; Dsra32 covers 32..63.
    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3353
// Emits integral Div/Rem, picking the cheapest strategy for the divisor:
// constant +/-1, power of two, other constant (magic multiply), or the
// hardware divide/modulo instructions for a register divisor.
void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  LocationSummary* locations = instruction->GetLocations();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
    GpuRegister divisor = second.AsRegister<GpuRegister>();
    if (instruction->IsDiv()) {
      if (type == DataType::Type::kInt32)
        __ DivR6(out, dividend, divisor);
      else
        __ Ddiv(out, dividend, divisor);
    } else {
      if (type == DataType::Type::kInt32)
        __ ModR6(out, dividend, divisor);
      else
        __ Dmod(out, dividend, divisor);
    }
  }
}
3391
Alexey Frunze4dda3372015-06-01 18:31:49 -07003392void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3393 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003394 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003395 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003396 case DataType::Type::kInt32:
3397 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003398 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003399 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003400 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3401 break;
3402
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003403 case DataType::Type::kFloat32:
3404 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003405 locations->SetInAt(0, Location::RequiresFpuRegister());
3406 locations->SetInAt(1, Location::RequiresFpuRegister());
3407 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3408 break;
3409
3410 default:
3411 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3412 }
3413}
3414
3415void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003416 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003417 LocationSummary* locations = instruction->GetLocations();
3418
3419 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003420 case DataType::Type::kInt32:
3421 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07003422 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003423 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003424 case DataType::Type::kFloat32:
3425 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003426 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3427 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3428 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003429 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07003430 __ DivS(dst, lhs, rhs);
3431 else
3432 __ DivD(dst, lhs, rhs);
3433 break;
3434 }
3435 default:
3436 LOG(FATAL) << "Unexpected div type " << type;
3437 }
3438}
3439
// Allocates locations for the divide-by-zero check; the checked value may
// stay a constant so the check can be resolved at compile time.
void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}
3444
// Emits the divide-by-zero check: branches to a throwing slow path when the
// divisor is zero. Constant divisors are resolved statically.
void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  DataType::Type type = instruction->GetType();

  if (!DataType::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
    if (divisor == 0) {
      // Constant zero divisor: unconditionally take the slow path.
      __ Bc(slow_path->GetEntryLabel());
    } else {
      // A division by a non-null constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    // Runtime check: branch to the slow path when the divisor register is 0.
    __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
3470
// A double constant needs no registers; it is emitted at its use site.
void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3476
// No code here: constant materialization happens at each use site.
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3480
// The exit block carries no locations.
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3484
// The exit block emits no code.
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
3487
// A float constant needs no registers; it is emitted at its use site.
void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3493
// No code here: constant materialization happens at each use site.
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3497
// Emits an unconditional transfer to `successor`, inserting a suspend check
// on loop back edges (and after the entry block's suspend check) before the
// branch. The branch itself is elided when the successor is the next block.
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the check emits the branch to the
    // successor itself, so return without falling through below.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
3515
// Goto carries no locations.
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
3519
// Delegates to HandleGoto with the goto's single successor.
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
3523
// Try boundaries carry no locations.
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3527
3528void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3529 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3530 if (!successor->IsExitBlock()) {
3531 HandleGoto(try_boundary, successor);
3532 }
3533}
3534
// Materializes the integer comparison `lhs <cond> rhs` into `dst` as 0 or 1.
// Constant right-hand sides that fit an instruction's 16-bit immediate are
// folded into Slti/Sltiu/Xori/Addiu forms; otherwise the constant is loaded
// into TMP. Conditions without a direct instruction (GE, GT, AE, A, LE, BE)
// are derived from their complement or from `rhs + 1`, with a final Xori to
// invert when needed.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addend avoids signed-overflow UB when rhs_imm is INT64_MAX.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          // dst = lhs - rhs, then test against zero.
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        if (use_imm && IsUint<16>(rhs_imm)) {
          // dst = lhs ^ rhs: zero iff equal.
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3690
// Emits code that leaves a value in `dst` from which `lhs <cond> rhs` can be
// decided as zero/non-zero, and returns whether the meaning is INVERTED:
// the return value is true when a zero `dst` means the condition holds
// (e.g. for kCondEQ, dst = lhs ^ rhs and the function returns true).
// The inversion is never applied here; the caller accounts for it.
bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
                                                               bool is64bit,
                                                               LocationSummary* input_locations,
                                                               GpuRegister dst) {
  GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = input_locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addend avoids signed-overflow UB when rhs_imm is INT64_MAX.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // dst = lhs - rhs or lhs ^ rhs: zero iff equal.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (is64bit) {
          __ Daddiu(dst, lhs, -rhs_imm);
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
        }
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
3798
// Emits a fused integer compare-and-branch to `label` for `lhs <cond> rhs`.
// A constant zero RHS uses the compare-with-zero compact branches; any other
// constant is loaded into TMP and the two-register compact branch is used.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      // LE/GT and BE/A have no direct branch; swap the operands instead.
      case kCondLE:
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
3885
// Materializes the boolean result (0 or 1) of a floating-point comparison
// into the GPR `dst`. MIPS64R6 CMP.cond.fmt writes all ones to FTMP when the
// condition holds and all zeros otherwise; `Andi dst, dst, 1` extracts bit 0,
// and for kCondNE `Addiu dst, dst, 1` logically negates the moved value
// (-1 + 1 = 0, 0 + 1 = 1).
// NaN handling: with `gt_bias`, LT/LE use ordered compares (false on NaN)
// while GT/GE use unordered compares on swapped operands (true on NaN);
// without `gt_bias` the ordered/unordered choices are reversed.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       DataType::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Negate the all-ones/all-zeros compare result: -1 -> 0, 0 -> 1.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        // Unsigned conditions (kCondB/kCondBE/kCondA/kCondAE) do not apply
        // to floating-point inputs.
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    // Same sequences as above, using the double-precision compare forms.
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Negate the all-ones/all-zeros compare result: -1 -> 0, 0 -> 1.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4000
// Emits a floating-point comparison whose all-ones/all-zeros result is left
// in FPU register `dst` (to be consumed later by sel.fmt/seleqz/selnez or
// moved with mfc1). Returns true when the emitted compare computes the
// *negation* of `cond` — only kCondNE, which reuses CmpEq — so the caller
// must swap its use of the result. NaN treatment via `gt_bias` matches
// GenerateFpCompare: with gt_bias, LT/LE are false on NaN and GT/GE true.
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          DataType::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No CmpNe instruction: emit CmpEq and report the result as inverted.
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        // Unsigned conditions do not apply to floating-point inputs.
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    // Same sequences as above, using the double-precision compare forms.
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No CmpNe instruction: emit CmpEq and report the result as inverted.
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4091
// Compares two floating-point inputs and branches to `label` when `cond`
// holds. CMP.cond.fmt leaves all ones/zeros in FTMP; BC1NEZ then branches on
// a non-zero bit 0 of FTMP (BC1EQZ is used for kCondNE to invert the CmpEq
// result). NaN treatment via `gt_bias` matches GenerateFpCompare.
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                DataType::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // Branch when the CmpEq result is zero, i.e. when lhs != rhs.
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        // Unsigned conditions do not apply to floating-point inputs.
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    // Same sequences as above, using the double-precision compare forms.
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // Branch when the CmpEq result is zero, i.e. when lhs != rhs.
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4194
// Emits the branch(es) for `instruction`'s boolean input found at
// `condition_input_index`. Either target may be null, meaning control falls
// through to that successor in emission order. Constant conditions become
// unconditional branches (or nothing); materialized conditions are tested
// against zero; unmaterialized HConditions are folded into a fused
// compare-and-branch, possibly on the opposite condition.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    DataType::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    if (true_target == nullptr) {
      // Pattern (1): branch to the false target on the opposite condition.
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        // All sub-int64 integral and reference types use the 32-bit compare.
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case DataType::Type::kInt64:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
4270
4271void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004272 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004273 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004274 locations->SetInAt(0, Location::RequiresRegister());
4275 }
4276}
4277
4278void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004279 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4280 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004281 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004282 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004283 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004284 nullptr : codegen_->GetLabelOf(false_successor);
4285 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004286}
4287
4288void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004289 LocationSummary* locations = new (GetGraph()->GetAllocator())
Alexey Frunze4dda3372015-06-01 18:31:49 -07004290 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01004291 InvokeRuntimeCallingConvention calling_convention;
4292 RegisterSet caller_saves = RegisterSet::Empty();
4293 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4294 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00004295 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004296 locations->SetInAt(0, Location::RequiresRegister());
4297 }
4298}
4299
4300void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08004301 SlowPathCodeMIPS64* slow_path =
4302 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00004303 GenerateTestAndBranch(deoptimize,
4304 /* condition_input_index */ 0,
4305 slow_path->GetEntryLabel(),
4306 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004307}
4308
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004309// This function returns true if a conditional move can be generated for HSelect.
4310// Otherwise it returns false and HSelect must be implemented in terms of conditonal
4311// branches and regular moves.
4312//
4313// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
4314//
4315// While determining feasibility of a conditional move and setting inputs/outputs
4316// are two distinct tasks, this function does both because they share quite a bit
4317// of common logic.
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  // A materialized condition is always an int32 value in a register.
  DataType::Type cond_type =
      materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
  DataType::Type dst_type = select->GetType();

  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // A constant condition never uses a conditional move; the generic
  // branch-and-move path in VisitSelect handles it.
  if (!cond->IsConstant()) {
    if (!DataType::IsFloatingPointType(cond_type)) {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  // At most one of the two value inputs may be replaced by a zero constant,
  // and only when a conditional move is actually going to be emitted.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      // The branch-and-move fallback in VisitSelect overwrites the output
      // only on the true edge, so the output must alias the false (first)
      // input.
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}
4456
4457
// Emits the conditional-move sequence for an HSelect previously validated by
// CanMoveConditionally(). The condition is either read from a register
// (materialized) or computed now into cond_reg/fcond_reg; `cond_inverted`
// records when the computed value is the negation of the condition, in which
// case the seleqz/selnez (or their .fmt counterparts) operand roles are
// swapped below.
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already materialized in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    // Compute the condition into cond_reg (integer) or fcond_reg (FP) now.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // CanMoveConditionally() only allows zero constants as value inputs.
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer-typed destination: select with seleqz/selnez on GPRs.
      if (DataType::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        // Both sources live in registers: combine the two partial selects.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.s overwrites its first operand (the condition), so select into
        // fcond_reg first and move the result to the destination.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.d overwrites its first operand (the condition), so select into
        // fcond_reg first and move the result to the destination.
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4609
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004610void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004611 LocationSummary* locations = new (GetGraph()->GetAllocator())
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004612 LocationSummary(flag, LocationSummary::kNoCall);
4613 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004614}
4615
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004616void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4617 __ LoadFromOffset(kLoadWord,
4618 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4619 SP,
4620 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004621}
4622
David Brazdil74eb1b22015-12-14 11:44:01 +00004623void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004624 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004625 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004626}
4627
4628void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004629 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4630 GenConditionalMove(select);
4631 } else {
4632 LocationSummary* locations = select->GetLocations();
4633 Mips64Label false_target;
4634 GenerateTestAndBranch(select,
4635 /* condition_input_index */ 2,
4636 /* true_target */ nullptr,
4637 &false_target);
4638 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4639 __ Bind(&false_target);
4640 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004641}
4642
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // Debug-info markers take no inputs and produce no outputs; an empty
  // location summary is all that is required.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
4646
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in
  // CodeGenerator::Compile, so no code is emitted for the marker itself.
}
4650
// Emits a single NOP instruction.
void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}
4654
// Sets up register locations for an instance/static field get. A reference
// field read with compiler read barriers enabled may need to call a slow
// path, so it is marked kCallOnSlowPath and, for Baker read barriers, gets
// an extra temp (unless the out-of-line field thunks are enabled).
void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DataType::Type field_type = field_info.GetFieldType();
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      object_field_get_with_read_barrier
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps in the case of an object field get with
    // read barriers enabled: we do not want the move to overwrite the
    // object's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_field_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
    if (!kBakerReadBarrierThunksEnableForFields) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
4688
// Emits code for an instance/static field get: picks the load width from the
// field type, performs the (possibly implicit-null-checked) load — routing
// reference loads through the Baker read barrier fast path when enabled —
// and emits LoadAny barriers for volatile fields.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  // `null_checker` lets the load itself act as the implicit null check where
  // the codegen supports it.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // References are 32-bit compressed heap pointers; zero-extend.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With field thunks, no temp register is reserved (see HandleFieldGet
        // in the locations builder); pass an invalid location instead.
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // branches above (right after each reference load).
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4775
4776void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4777 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4778 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004779 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004780 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004781 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004782 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004783 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004784 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004785 }
4786}
4787
// Emits code for an instance/static field set: picks the store width from the
// field type, brackets volatile stores with AnyStore/AnyAny barriers, poisons
// reference values when heap poisoning is on, and marks the GC card when the
// stored value may be a reference the GC needs to trace.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  // `null_checker` lets the store itself act as the implicit null check.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      // References are stored as 32-bit compressed heap pointers.
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Volatile store: release ordering before the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        // Poison into TMP so `src` keeps the unpoisoned value for MarkGCCard.
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  // Record the card so the GC rescans `obj` for the stored reference.
  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Volatile store: full ordering after the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4863
// Instance field get: delegate to the shared field-get location setup.
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4867
// Instance field get: delegate to the shared field-get code emission.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4871
// Instance field set: delegate to the shared field-set location setup.
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4875
// Instance field set: delegate to the shared field-set code emission,
// forwarding whether the stored value may be null (affects GC card marking).
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4879
Alexey Frunze15958152017-02-09 19:08:30 -08004880void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
4881 HInstruction* instruction,
4882 Location out,
4883 uint32_t offset,
4884 Location maybe_temp,
4885 ReadBarrierOption read_barrier_option) {
4886 GpuRegister out_reg = out.AsRegister<GpuRegister>();
4887 if (read_barrier_option == kWithReadBarrier) {
4888 CHECK(kEmitCompilerReadBarrier);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004889 if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
4890 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
4891 }
Alexey Frunze15958152017-02-09 19:08:30 -08004892 if (kUseBakerReadBarrier) {
4893 // Load with fast path based Baker's read barrier.
4894 // /* HeapReference<Object> */ out = *(out + offset)
4895 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4896 out,
4897 out_reg,
4898 offset,
4899 maybe_temp,
4900 /* needs_null_check */ false);
4901 } else {
4902 // Load with slow path based read barrier.
4903 // Save the value of `out` into `maybe_temp` before overwriting it
4904 // in the following move operation, as we will need it for the
4905 // read barrier below.
4906 __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
4907 // /* HeapReference<Object> */ out = *(out + offset)
4908 __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
4909 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
4910 }
4911 } else {
4912 // Plain load with no read barrier.
4913 // /* HeapReference<Object> */ out = *(out + offset)
4914 __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
4915 __ MaybeUnpoisonHeapReference(out_reg);
4916 }
4917}
4918
// Loads a heap reference from a distinct holder register into `out`:
// out = *(obj + offset). Unlike GenerateReferenceLoadOneRegister, the holder
// survives the load, so the non-Baker slow path needs no save into a temp;
// `maybe_temp` is only required by Baker barriers when field thunks are
// disabled.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // A temp register is only reserved when field thunks are disabled.
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
4955
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004956static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
4957 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
4958 if (reg >= V0 && reg <= T2) { // 13 consequtive regs.
4959 return reg - V0;
4960 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
4961 return 13 + (reg - S2);
4962 } else if (reg == S8) { // One more.
4963 return 19;
4964 }
4965 LOG(FATAL) << "Unexpected register " << reg;
4966 UNREACHABLE();
4967}
4968
4969static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
4970 int num = GetBakerMarkThunkNumber(reg) +
4971 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
4972 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
4973}
4974
4975static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
4976 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
4977 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
4978}
4979
// Loads a GC root: root = *(obj + offset), applying the requested read-barrier
// strategy (thunk-based Baker, entrypoint-based Baker, non-Baker slow path, or
// none). `label_low` is bound immediately before the actual load instruction
// so callers can patch the low half of the address; when it is supplied the
// offset is the 0x5678 placeholder.
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    // Patchable loads must use the placeholder low-half offset.
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        // // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //    temp = &gc_root_thunk<root_reg>
        //    root = temp(root)
        // }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        // For wide offsets, the high half is folded into TMP via Daui below.
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          DCHECK(!label_low);
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        // if (temp != null) {
        //   root = temp(root)
        // }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5104
Alexey Frunze15958152017-02-09 19:08:30 -08005105void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5106 Location ref,
5107 GpuRegister obj,
5108 uint32_t offset,
5109 Location temp,
5110 bool needs_null_check) {
5111 DCHECK(kEmitCompilerReadBarrier);
5112 DCHECK(kUseBakerReadBarrier);
5113
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005114 if (kBakerReadBarrierThunksEnableForFields) {
5115 // Note that we do not actually check the value of `GetIsGcMarking()`
5116 // to decide whether to mark the loaded reference or not. Instead, we
5117 // load into `temp` (T9) the read barrier mark introspection entrypoint.
5118 // If `temp` is null, it means that `GetIsGcMarking()` is false, and
5119 // vice versa.
5120 //
5121 // We use thunks for the slow path. That thunk checks the reference
5122 // and jumps to the entrypoint if needed. If the holder is not gray,
5123 // it issues a load-load memory barrier and returns to the original
5124 // reference load.
5125 //
5126 // temp = Thread::Current()->pReadBarrierMarkReg00
5127 // // AKA &art_quick_read_barrier_mark_introspection.
5128 // if (temp != nullptr) {
5129 // temp = &field_array_thunk<holder_reg>
5130 // temp()
5131 // }
5132 // not_gray_return_address:
5133 // // If the offset is too large to fit into the lw instruction, we
5134 // // use an adjusted base register (TMP) here. This register
5135 // // receives bits 16 ... 31 of the offset before the thunk invocation
5136 // // and the thunk benefits from it.
5137 // HeapReference<mirror::Object> reference = *(obj+offset); // Original reference load.
5138 // gray_return_address:
5139
5140 DCHECK(temp.IsInvalid());
5141 bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
5142 const int32_t entry_point_offset =
5143 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
5144 // There may have or may have not been a null check if the field offset is smaller than
5145 // the page size.
5146 // There must've been a null check in case it's actually a load from an array.
5147 // We will, however, perform an explicit null check in the thunk as it's easier to
5148 // do it than not.
5149 if (instruction->IsArrayGet()) {
5150 DCHECK(!needs_null_check);
5151 }
5152 const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
5153 // Loading the entrypoint does not require a load acquire since it is only changed when
5154 // threads are suspended or running a checkpoint.
5155 __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
5156 GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
Alexey Frunze0cab6562017-07-25 15:19:36 -07005157 Mips64Label skip_call;
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005158 if (short_offset) {
Alexey Frunze0cab6562017-07-25 15:19:36 -07005159 __ Beqzc(T9, &skip_call, /* is_bare */ true);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005160 __ Nop(); // In forbidden slot.
5161 __ Jialc(T9, thunk_disp);
Alexey Frunze0cab6562017-07-25 15:19:36 -07005162 __ Bind(&skip_call);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005163 // /* HeapReference<Object> */ ref = *(obj + offset)
5164 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset); // Single instruction.
5165 } else {
5166 int16_t offset_low = Low16Bits(offset);
5167 int16_t offset_high = High16Bits(offset - offset_low); // Accounts for sign extension in lwu.
Alexey Frunze0cab6562017-07-25 15:19:36 -07005168 __ Beqz(T9, &skip_call, /* is_bare */ true);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005169 __ Daui(TMP, obj, offset_high); // In delay slot.
5170 __ Jialc(T9, thunk_disp);
Alexey Frunze0cab6562017-07-25 15:19:36 -07005171 __ Bind(&skip_call);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005172 // /* HeapReference<Object> */ ref = *(obj + offset)
5173 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low); // Single instruction.
5174 }
5175 if (needs_null_check) {
5176 MaybeRecordImplicitNullCheck(instruction);
5177 }
5178 __ MaybeUnpoisonHeapReference(ref_reg);
5179 return;
5180 }
5181
Alexey Frunze15958152017-02-09 19:08:30 -08005182 // /* HeapReference<Object> */ ref = *(obj + offset)
5183 Location no_index = Location::NoLocation();
5184 ScaleFactor no_scale_factor = TIMES_1;
5185 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5186 ref,
5187 obj,
5188 offset,
5189 no_index,
5190 no_scale_factor,
5191 temp,
5192 needs_null_check);
5193}
5194
// Emits a reference array-element load with a Baker read barrier:
// ref = data[index], either through the compact thunk-based fast path (when
// array thunks are enabled) or through the generic
// GenerateReferenceLoadWithBakerReadBarrier path. `temp` must be invalid in
// the thunk case and a valid register otherwise.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // Elements are 32-bit compressed references, hence the 4-byte scale.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    // // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // The element address is pre-calculated in the TMP register before the
    //   // thunk invocation and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    // gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    // Pre-compute the element base in TMP (also executed in the delay slot).
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Generic (non-thunk) Baker path.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5270
5271void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5272 Location ref,
5273 GpuRegister obj,
5274 uint32_t offset,
5275 Location index,
5276 ScaleFactor scale_factor,
5277 Location temp,
5278 bool needs_null_check,
5279 bool always_update_field) {
5280 DCHECK(kEmitCompilerReadBarrier);
5281 DCHECK(kUseBakerReadBarrier);
5282
5283 // In slow path based read barriers, the read barrier call is
5284 // inserted after the original load. However, in fast path based
5285 // Baker's read barriers, we need to perform the load of
5286 // mirror::Object::monitor_ *before* the original reference load.
5287 // This load-load ordering is required by the read barrier.
5288 // The fast path/slow path (for Baker's algorithm) should look like:
5289 //
5290 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
5291 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5292 // HeapReference<Object> ref = *src; // Original reference load.
5293 // bool is_gray = (rb_state == ReadBarrier::GrayState());
5294 // if (is_gray) {
5295 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5296 // }
5297 //
5298 // Note: the original implementation in ReadBarrier::Barrier is
5299 // slightly more complex as it performs additional checks that we do
5300 // not do here for performance reasons.
5301
5302 GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
5303 GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
5304 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5305
5306 // /* int32_t */ monitor = obj->monitor_
5307 __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
5308 if (needs_null_check) {
5309 MaybeRecordImplicitNullCheck(instruction);
5310 }
5311 // /* LockWord */ lock_word = LockWord(monitor)
5312 static_assert(sizeof(LockWord) == sizeof(int32_t),
5313 "art::LockWord and int32_t have different sizes.");
5314
5315 __ Sync(0); // Barrier to prevent load-load reordering.
5316
5317 // The actual reference load.
5318 if (index.IsValid()) {
5319 // Load types involving an "index": ArrayGet,
5320 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
5321 // intrinsics.
5322 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
5323 if (index.IsConstant()) {
5324 size_t computed_offset =
5325 (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
5326 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
5327 } else {
5328 GpuRegister index_reg = index.AsRegister<GpuRegister>();
Chris Larsencd0295d2017-03-31 15:26:54 -07005329 if (scale_factor == TIMES_1) {
5330 __ Daddu(TMP, index_reg, obj);
5331 } else {
5332 __ Dlsa(TMP, index_reg, obj, scale_factor);
5333 }
Alexey Frunze15958152017-02-09 19:08:30 -08005334 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
5335 }
5336 } else {
5337 // /* HeapReference<Object> */ ref = *(obj + offset)
5338 __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
5339 }
5340
5341 // Object* ref = ref_addr->AsMirrorPtr()
5342 __ MaybeUnpoisonHeapReference(ref_reg);
5343
5344 // Slow path marking the object `ref` when it is gray.
5345 SlowPathCodeMIPS64* slow_path;
5346 if (always_update_field) {
5347 // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
5348 // of the form `obj + field_offset`, where `obj` is a register and
5349 // `field_offset` is a register. Thus `offset` and `scale_factor`
5350 // above are expected to be null in this code path.
5351 DCHECK_EQ(offset, 0u);
5352 DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
Vladimir Marko174b2e22017-10-12 13:34:49 +01005353 slow_path = new (GetScopedAllocator())
Alexey Frunze15958152017-02-09 19:08:30 -08005354 ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
5355 ref,
5356 obj,
5357 /* field_offset */ index,
5358 temp_reg);
5359 } else {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005360 slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
Alexey Frunze15958152017-02-09 19:08:30 -08005361 }
5362 AddSlowPath(slow_path);
5363
5364 // if (rb_state == ReadBarrier::GrayState())
5365 // ref = ReadBarrier::Mark(ref);
5366 // Given the numeric representation, it's enough to check the low bit of the
5367 // rb_state. We do that by shifting the bit into the sign bit (31) and
5368 // performing a branch on less than zero.
5369 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
5370 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
5371 static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
5372 __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
5373 __ Bltzc(temp_reg, slow_path->GetEntryLabel());
5374 __ Bind(slow_path->GetExitLabel());
5375}
5376
5377void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
5378 Location out,
5379 Location ref,
5380 Location obj,
5381 uint32_t offset,
5382 Location index) {
5383 DCHECK(kEmitCompilerReadBarrier);
5384
5385 // Insert a slow path based read barrier *after* the reference load.
5386 //
5387 // If heap poisoning is enabled, the unpoisoning of the loaded
5388 // reference will be carried out by the runtime within the slow
5389 // path.
5390 //
5391 // Note that `ref` currently does not get unpoisoned (when heap
5392 // poisoning is enabled), which is alright as the `ref` argument is
5393 // not used by the artReadBarrierSlow entry point.
5394 //
5395 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005396 SlowPathCodeMIPS64* slow_path = new (GetScopedAllocator())
Alexey Frunze15958152017-02-09 19:08:30 -08005397 ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
5398 AddSlowPath(slow_path);
5399
5400 __ Bc(slow_path->GetEntryLabel());
5401 __ Bind(slow_path->GetExitLabel());
5402}
5403
5404void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5405 Location out,
5406 Location ref,
5407 Location obj,
5408 uint32_t offset,
5409 Location index) {
5410 if (kEmitCompilerReadBarrier) {
5411 // Baker's read barriers shall be handled by the fast path
5412 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5413 DCHECK(!kUseBakerReadBarrier);
5414 // If heap poisoning is enabled, unpoisoning will be taken care of
5415 // by the runtime within the slow path.
5416 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5417 } else if (kPoisonHeapReferences) {
5418 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5419 }
5420}
5421
5422void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5423 Location out,
5424 Location root) {
5425 DCHECK(kEmitCompilerReadBarrier);
5426
5427 // Insert a slow path based read barrier *after* the GC root load.
5428 //
5429 // Note that GC roots are not affected by heap poisoning, so we do
5430 // not need to do anything special for this here.
5431 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005432 new (GetScopedAllocator()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
Alexey Frunze15958152017-02-09 19:08:30 -08005433 AddSlowPath(slow_path);
5434
5435 __ Bc(slow_path->GetEntryLabel());
5436 __ Bind(slow_path->GetExitLabel());
5437}
5438
Alexey Frunze4dda3372015-06-01 18:31:49 -07005439void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005440 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5441 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005442 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005443 switch (type_check_kind) {
5444 case TypeCheckKind::kExactCheck:
5445 case TypeCheckKind::kAbstractClassCheck:
5446 case TypeCheckKind::kClassHierarchyCheck:
5447 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08005448 call_kind =
5449 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07005450 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005451 break;
5452 case TypeCheckKind::kArrayCheck:
5453 case TypeCheckKind::kUnresolvedCheck:
5454 case TypeCheckKind::kInterfaceCheck:
5455 call_kind = LocationSummary::kCallOnSlowPath;
5456 break;
5457 }
5458
Vladimir Markoca6fff82017-10-03 14:49:14 +01005459 LocationSummary* locations =
5460 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005461 if (baker_read_barrier_slow_path) {
5462 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5463 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005464 locations->SetInAt(0, Location::RequiresRegister());
5465 locations->SetInAt(1, Location::RequiresRegister());
5466 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005467 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005468 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005469 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005470}
5471
// Generates code for HInstanceOf, leaving a boolean (0/1) result in `out`.
// The straightforward kinds (exact, abstract class, class hierarchy,
// array-object) are checked inline; kArrayCheck, kUnresolvedCheck and
// kInterfaceCheck defer to TypeCheckSlowPathMIPS64.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  // At most one temp register is ever requested for instanceof.
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = ((out ^ cls) < 1), i.e. 1 iff the classes match exactly.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = 1 iff the component type is a reference type (kPrimNot == 0).
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5645
5646void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005647 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005648 locations->SetOut(Location::ConstantLocation(constant));
5649}
5650
5651void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
5652 // Will be generated at use site.
5653}
5654
5655void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005656 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005657 locations->SetOut(Location::ConstantLocation(constant));
5658}
5659
5660void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
5661 // Will be generated at use site.
5662}
5663
Calin Juravle175dc732015-08-25 15:42:32 +01005664void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5665 // The trampoline uses the same calling convention as dex calling conventions,
5666 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
5667 // the method_idx.
5668 HandleInvoke(invoke);
5669}
5670
5671void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5672 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
5673}
5674
Alexey Frunze4dda3372015-06-01 18:31:49 -07005675void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5676 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5677 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5678}
5679
5680void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
5681 HandleInvoke(invoke);
5682 // The register T0 is required to be used for the hidden argument in
5683 // art_quick_imt_conflict_trampoline, so add the hidden argument.
5684 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
5685}
5686
// Generates an interface call: loads the receiver's class, indexes into its
// IMT, and jumps to the resolved entry point with the dex method index in
// the hidden argument register.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument.
  // Temp 1 was pinned to T0 by the locations builder, which is where the
  // IMT conflict trampoline expects the dex method index.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // Record the class load above as the implicit null check, if one applies.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->imt_ (load the IMT pointer out of the class).
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();  // Fill the Jalr delay slot.
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5728
5729void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005730 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5731 if (intrinsic.TryDispatch(invoke)) {
5732 return;
5733 }
5734
Alexey Frunze4dda3372015-06-01 18:31:49 -07005735 HandleInvoke(invoke);
5736}
5737
5738void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005739 // Explicit clinit checks triggered by static invokes must have been pruned by
5740 // art::PrepareForRegisterAllocation.
5741 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005742
Chris Larsen3039e382015-08-26 07:54:08 -07005743 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5744 if (intrinsic.TryDispatch(invoke)) {
5745 return;
5746 }
5747
Alexey Frunze4dda3372015-06-01 18:31:49 -07005748 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005749}
5750
Orion Hodsonac141392017-01-13 11:53:47 +00005751void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5752 HandleInvoke(invoke);
5753}
5754
5755void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5756 codegen_->GenerateInvokePolymorphicCall(invoke);
5757}
5758
Chris Larsen3039e382015-08-26 07:54:08 -07005759static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005760 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005761 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5762 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005763 return true;
5764 }
5765 return false;
5766}
5767
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005768HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005769 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005770 bool fallback_load = false;
5771 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005772 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005773 case HLoadString::LoadKind::kBootImageInternTable:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005774 case HLoadString::LoadKind::kBssEntry:
5775 DCHECK(!Runtime::Current()->UseJitCompilation());
5776 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005777 case HLoadString::LoadKind::kJitTableAddress:
5778 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005779 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005780 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005781 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01005782 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005783 }
5784 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005785 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005786 }
5787 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005788}
5789
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005790HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
5791 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005792 bool fallback_load = false;
5793 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005794 case HLoadClass::LoadKind::kInvalid:
5795 LOG(FATAL) << "UNREACHABLE";
5796 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005797 case HLoadClass::LoadKind::kReferrersClass:
5798 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005799 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005800 case HLoadClass::LoadKind::kBootImageClassTable:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005801 case HLoadClass::LoadKind::kBssEntry:
5802 DCHECK(!Runtime::Current()->UseJitCompilation());
5803 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005804 case HLoadClass::LoadKind::kJitTableAddress:
5805 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005806 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005807 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005808 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005809 break;
5810 }
5811 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005812 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005813 }
5814 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005815}
5816
Vladimir Markodc151b22015-10-15 18:02:30 +01005817HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
5818 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01005819 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunze19f6c692016-11-30 19:19:55 -08005820 // On MIPS64 we support all dispatch types.
5821 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01005822}
5823
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005824void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
5825 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005826 // All registers are assumed to be correctly set up per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00005827 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
Alexey Frunze19f6c692016-11-30 19:19:55 -08005828 HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
5829 HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
5830
Alexey Frunze19f6c692016-11-30 19:19:55 -08005831 switch (method_load_kind) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005832 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00005833 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005834 uint32_t offset =
5835 GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00005836 __ LoadFromOffset(kLoadDoubleword,
5837 temp.AsRegister<GpuRegister>(),
5838 TR,
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005839 offset);
Vladimir Marko58155012015-08-19 12:49:41 +00005840 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01005841 }
Vladimir Marko58155012015-08-19 12:49:41 +00005842 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00005843 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00005844 break;
Vladimir Marko65979462017-05-19 17:25:12 +01005845 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
5846 DCHECK(GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005847 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko65979462017-05-19 17:25:12 +01005848 NewPcRelativeMethodPatch(invoke->GetTargetMethod());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005849 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
5850 NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
5851 EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Vladimir Marko65979462017-05-19 17:25:12 +01005852 __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
5853 break;
5854 }
Vladimir Marko58155012015-08-19 12:49:41 +00005855 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
Alexey Frunze19f6c692016-11-30 19:19:55 -08005856 __ LoadLiteral(temp.AsRegister<GpuRegister>(),
5857 kLoadDoubleword,
5858 DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00005859 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005860 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005861 PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
Vladimir Marko0eb882b2017-05-15 13:39:18 +01005862 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
Alexey Frunze5fa5c042017-06-01 21:07:52 -07005863 PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
5864 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
5865 EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunze19f6c692016-11-30 19:19:55 -08005866 __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
5867 break;
5868 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005869 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
5870 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
5871 return; // No code pointer retrieval; the runtime performs the call directly.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005872 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005873 }
5874
Alexey Frunze19f6c692016-11-30 19:19:55 -08005875 switch (code_ptr_location) {
Vladimir Marko58155012015-08-19 12:49:41 +00005876 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Alexey Frunze19f6c692016-11-30 19:19:55 -08005877 __ Balc(&frame_entry_label_);
Vladimir Marko58155012015-08-19 12:49:41 +00005878 break;
Vladimir Marko58155012015-08-19 12:49:41 +00005879 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
5880 // T9 = callee_method->entry_point_from_quick_compiled_code_;
5881 __ LoadFromOffset(kLoadDoubleword,
5882 T9,
5883 callee_method.AsRegister<GpuRegister>(),
5884 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07005885 kMips64PointerSize).Int32Value());
Vladimir Marko58155012015-08-19 12:49:41 +00005886 // T9()
5887 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005888 __ Nop();
Vladimir Marko58155012015-08-19 12:49:41 +00005889 break;
5890 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005891 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
5892
Alexey Frunze4dda3372015-06-01 18:31:49 -07005893 DCHECK(!IsLeafMethod());
5894}
5895
5896void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005897 // Explicit clinit checks triggered by static invokes must have been pruned by
5898 // art::PrepareForRegisterAllocation.
5899 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005900
5901 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5902 return;
5903 }
5904
5905 LocationSummary* locations = invoke->GetLocations();
5906 codegen_->GenerateStaticOrDirectCall(invoke,
5907 locations->HasTemps()
5908 ? locations->GetTemp(0)
5909 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005910}
5911
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005912void CodeGeneratorMIPS64::GenerateVirtualCall(
5913 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005914 // Use the calling convention instead of the location of the receiver, as
5915 // intrinsics may have put the receiver in a different register. In the intrinsics
5916 // slow path, the arguments have been moved to the right place, so here we are
5917 // guaranteed that the receiver is the first register of the calling convention.
5918 InvokeDexCallingConvention calling_convention;
5919 GpuRegister receiver = calling_convention.GetRegisterAt(0);
5920
Alexey Frunze53afca12015-11-05 16:34:23 -08005921 GpuRegister temp = temp_location.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005922 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
5923 invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
5924 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07005925 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005926
5927 // temp = object->GetClass();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005928 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
Alexey Frunze53afca12015-11-05 16:34:23 -08005929 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08005930 // Instead of simply (possibly) unpoisoning `temp` here, we should
5931 // emit a read barrier for the previous class reference load.
5932 // However this is not required in practice, as this is an
5933 // intermediate/temporary reference and because the current
5934 // concurrent copying collector keeps the from-space memory
5935 // intact/accessible until the end of the marking phase (the
5936 // concurrent copying collector may not in the future).
5937 __ MaybeUnpoisonHeapReference(temp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005938 // temp = temp->GetMethodAt(method_offset);
5939 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
5940 // T9 = temp->GetEntryPoint();
5941 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
5942 // T9();
5943 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005944 __ Nop();
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005945 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Alexey Frunze53afca12015-11-05 16:34:23 -08005946}
5947
5948void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
5949 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5950 return;
5951 }
5952
5953 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005954 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005955}
5956
// Builds the LocationSummary for an HLoadClass, choosing the call kind and
// register constraints according to the load kind and read-barrier config.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Full runtime call: the class argument and the result both use the first
    // runtime calling-convention register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // A read barrier is only needed for classes outside the boot image.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // Input 0 carries the current method, whose declaring class is loaded.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
5993
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits the class load for each supported HLoadClass load kind, and, when the
// class may need resolution or initialization, wires up the shared slow path.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes never move, so they can be loaded without a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high = nullptr;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      // PC-relative address pair (high in AT, low folded into the Daddiu);
      // both halves are patched at link time.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      // The class object's 32-bit address is known now; load it as a literal.
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBootImageClassTable: {
      // Load the ClassTable slot PC-relatively, then strip the hash bits
      // stored alongside the reference.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      // Extract the reference from the slot data, i.e. clear the hash bits.
      int32_t masked_hash = ClassTable::TableSlot::MaskHash(
          ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
      if (masked_hash != 0) {
        __ Daddiu(out, out, -masked_hash);
      }
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the GC root from the type's .bss entry; a null result means the
      // type is unresolved and the slow path must run.
      bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      GpuRegister temp = non_baker_read_barrier
          ? out
          : locations->GetTemp(0).AsRegister<GpuRegister>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, temp);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      // JIT: the class reference lives in a per-method literal table root.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      // kRuntimeCall was handled at the top of the function.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // Unresolved .bss entry: fall into the slow path to resolve the type.
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6113
// Returns the offset of the pending-exception field within the Thread object,
// for loads/stores relative to the thread register (TR).
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
}
6117
Alexey Frunze4dda3372015-06-01 18:31:49 -07006118void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6119 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006120 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006121 locations->SetOut(Location::RequiresRegister());
6122}
6123
6124void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6125 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006126 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6127}
6128
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  // No inputs, outputs, or runtime call — only the summary itself is needed.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6132
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store zero (null) into the thread-local pending-exception slot.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
6136
// Builds the LocationSummary for an HLoadString; mirrors VisitLoadClass's
// handling of the runtime-call and .bss-entry load kinds.
void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    // Runtime resolution: the result arrives in the first runtime-call register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path.
        locations->AddTemp(Location::RequiresRegister());
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
6161
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
//
// Emits the string load for each supported HLoadString load kind; unresolved
// .bss entries branch to a slow path, and the default case falls through to a
// full runtime call.
void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      // PC-relative address pair patched at link time (high in AT, low in the
      // Daddiu immediate).
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      return;
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      // The string object's 32-bit address is known now; load it as a literal.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;
    }
    case HLoadString::LoadKind::kBootImageInternTable: {
      // Load the interned-string reference from the boot-image intern table.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Load the GC root from the string's .bss entry; null means unresolved,
      // so branch to the slow path that calls pResolveString.
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      GpuRegister temp = non_baker_read_barrier
          ? out
          : locations->GetTemp(0).AsRegister<GpuRegister>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, temp);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption,
                              &info_low->label);
      SlowPathCodeMIPS64* slow_path =
          new (codegen_->GetScopedAllocator()) LoadStringSlowPathMIPS64(load, info_high);
      codegen_->AddSlowPath(slow_path);
      __ Beqzc(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress:
      // JIT: the string reference lives in a per-method literal table root.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
                                                           load->GetStringIndex(),
                                                           load->GetString()));
      GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
      return;
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6244
Alexey Frunze4dda3372015-06-01 18:31:49 -07006245void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006246 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006247 locations->SetOut(Location::ConstantLocation(constant));
6248}
6249
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6253
6254void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006255 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6256 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006257 InvokeRuntimeCallingConvention calling_convention;
6258 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6259}
6260
6261void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006262 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006263 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006264 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006265 if (instruction->IsEnter()) {
6266 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6267 } else {
6268 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6269 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006270}
6271
6272void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6273 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006274 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006275 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006276 case DataType::Type::kInt32:
6277 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006278 locations->SetInAt(0, Location::RequiresRegister());
6279 locations->SetInAt(1, Location::RequiresRegister());
6280 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6281 break;
6282
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006283 case DataType::Type::kFloat32:
6284 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006285 locations->SetInAt(0, Location::RequiresFpuRegister());
6286 locations->SetInAt(1, Location::RequiresFpuRegister());
6287 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6288 break;
6289
6290 default:
6291 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6292 }
6293}
6294
6295void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006296 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006297 LocationSummary* locations = instruction->GetLocations();
6298
6299 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006300 case DataType::Type::kInt32:
6301 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006302 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6303 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6304 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006305 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006306 __ MulR6(dst, lhs, rhs);
6307 else
6308 __ Dmul(dst, lhs, rhs);
6309 break;
6310 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006311 case DataType::Type::kFloat32:
6312 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006313 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6314 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6315 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006316 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006317 __ MulS(dst, lhs, rhs);
6318 else
6319 __ MulD(dst, lhs, rhs);
6320 break;
6321 }
6322 default:
6323 LOG(FATAL) << "Unexpected mul type " << type;
6324 }
6325}
6326
6327void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6328 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006329 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006330 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006331 case DataType::Type::kInt32:
6332 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006333 locations->SetInAt(0, Location::RequiresRegister());
6334 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6335 break;
6336
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006337 case DataType::Type::kFloat32:
6338 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006339 locations->SetInAt(0, Location::RequiresFpuRegister());
6340 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6341 break;
6342
6343 default:
6344 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6345 }
6346}
6347
6348void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006349 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006350 LocationSummary* locations = instruction->GetLocations();
6351
6352 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006353 case DataType::Type::kInt32:
6354 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006355 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6356 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006357 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006358 __ Subu(dst, ZERO, src);
6359 else
6360 __ Dsubu(dst, ZERO, src);
6361 break;
6362 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006363 case DataType::Type::kFloat32:
6364 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006365 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6366 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006367 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006368 __ NegS(dst, src);
6369 else
6370 __ NegD(dst, src);
6371 break;
6372 }
6373 default:
6374 LOG(FATAL) << "Unexpected neg type " << type;
6375 }
6376}
6377
6378void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006379 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6380 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006381 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006382 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006383 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6384 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006385}
6386
void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  // The entrypoint is selected from the array's resolved component class.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
6396
6397void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006398 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6399 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006400 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00006401 if (instruction->IsStringAlloc()) {
6402 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
6403 } else {
6404 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00006405 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006406 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006407}
6408
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    // Load the entrypoint's ArtMethod* from thread-local storage, then call
    // through its quick-compiled-code pointer via T9.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();  // Fill the JALR branch delay slot.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    // All other allocations go through the instruction's selected entrypoint.
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
6427
6428void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006429 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006430 locations->SetInAt(0, Location::RequiresRegister());
6431 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6432}
6433
void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
      // NOR with the zero register computes ~src; the same encoding serves
      // both 32- and 64-bit operands on MIPS64.
      __ Nor(dst, src, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}
6451
void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  // Boolean negation: core register in, core register out; output may share
  // the input register.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
6457
6458void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6459 LocationSummary* locations = instruction->GetLocations();
6460 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6461 locations->InAt(0).AsRegister<GpuRegister>(),
6462 1);
6463}
6464
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Use the shared throwing-slow-path location summary; only the checked
  // reference needs a register.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
6469
void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a following memory access can absorb the check, emit nothing here.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Implicit check: a dummy word load from the object. A null reference
  // faults here; presumably the runtime's fault handler converts that into a
  // NullPointerException (the recorded PC maps the fault back to this check).
  __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}
6479
void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  // Explicit check: compare against null and branch to a throwing slow path.
  SlowPathCodeMIPS64* slow_path =
      new (GetScopedAllocator()) NullCheckSlowPathMIPS64(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  // Branch (compact, no delay slot) to the slow path when the reference is zero.
  __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
}
6489
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit (faulting load) or explicit (branch) variant.
  codegen_->GenerateNullCheck(instruction);
}
6493
// Bitwise OR shares the generic two-operand handling.
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
6501
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are created by the register allocator after the locations
  // pass has already run, so this builder must never see one.
  LOG(FATAL) << "Unreachable";
}
6505
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  // Resolve the move graph (handles cycles/swaps) and emit the actual moves.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6516
6517void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006518 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006519 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6520 if (location.IsStackSlot()) {
6521 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6522 } else if (location.IsDoubleStackSlot()) {
6523 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6524 }
6525 locations->SetOut(location);
6526}
6527
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  // The current ArtMethod* is pinned to the method register on entry.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
6543
void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
  // Phi inputs/outputs may live anywhere; the register allocator resolves
  // them into parallel moves on the incoming edges.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis generate no code themselves (handled via edge moves).
  LOG(FATAL) << "Unreachable";
}
6555
6556void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006557 DataType::Type type = rem->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006558 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006559 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6560 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006561 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006562
6563 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006564 case DataType::Type::kInt32:
6565 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006566 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006567 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006568 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6569 break;
6570
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006571 case DataType::Type::kFloat32:
6572 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006573 InvokeRuntimeCallingConvention calling_convention;
6574 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6575 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6576 locations->SetOut(calling_convention.GetReturnLocation(type));
6577 break;
6578 }
6579
6580 default:
6581 LOG(FATAL) << "Unexpected rem type " << type;
6582 }
6583}
6584
void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
  DataType::Type type = instruction->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      // Integer remainder shares the div/rem lowering helper.
      GenerateDivRemIntegral(instruction);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // Floating-point remainder is delegated to the libc-backed entrypoints.
      QuickEntrypointEnum entrypoint =
          (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
      if (type == DataType::Type::kFloat32) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
6610
void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // No operands and no output: the fence needs no locations.
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence only needs to order the object's field stores before
  // the reference is published: a store-store barrier suffices.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit whichever barrier kind the HIR node requests.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
6627
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  // Pin the returned value to the ABI return location for its type.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(ret);
  DataType::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}

void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
6645
// Rotate and shift operations all share the generic shift handling.
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
6669
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

// Static field accesses share the generic field get/set handling.
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() lets the write barrier be skipped for known non-null values.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
6693
// Unresolved field accesses (field could not be resolved at compile time) are
// all routed through the runtime via the field-access calling convention.
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
6761
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
6771
void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  // Standalone check: poll the thread flags and call the runtime if suspension
  // was requested (no successor label to fall through to).
  GenerateSuspendCheck(instruction, nullptr);
}
6785
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  // Throw always calls into the runtime; the exception goes in the first
  // runtime-call argument register.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6797
6798void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006799 DataType::Type input_type = conversion->GetInputType();
6800 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006801 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
6802 << input_type << " -> " << result_type;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006803
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006804 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
6805 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006806 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6807 }
6808
Vladimir Markoca6fff82017-10-03 14:49:14 +01006809 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(conversion);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006810
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006811 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006812 locations->SetInAt(0, Location::RequiresFpuRegister());
6813 } else {
6814 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006815 }
6816
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006817 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006818 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006819 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006820 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006821 }
6822}
6823
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    // Integer-to-integer: narrow/widen in core registers.
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint8:
        // Zero-extend to 8 bits.
        __ Andi(dst, src, 0xFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kUint16:
        // Zero-extend to 16 bits.
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    // Integer-to-FP: move the value into the FPU scratch register, then convert.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == DataType::Type::kInt64) {
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    // FP-to-integer: truncate into the FPU scratch register, then move to core.
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == DataType::Type::kInt64) {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    // FP-to-FP: single precision-change instruction.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
6931
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
6957
// All comparison conditions (signed: Equal..GreaterThanOrEqual; unsigned:
// Below..AboveOrEqual) share the generic condition handling.
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
7037
// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  // Only the switch value needs a register; branching clobbers nothing else.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
7044
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  // The running value in temp_reg is biased so that each case value in turn
  // becomes zero; two cases are consumed per Addiu(-2) round.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  // Skip the branch when the default block immediately follows this one.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7080
// Lowers a packed switch via an in-code jump table: a range check followed by
// an indexed load of a 32-bit offset that is relative to the table start.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  // One label per case, in successor order; the assembler materializes the
  // table from these labels.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  // Bias by -lower_bound; the unsigned compare (Bgeuc) then rejects both
  // values below lower_bound (which wrap to large unsigned) and values at or
  // beyond num_entries with a single branch.
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // TMP = AT + (index << 2): each table entry is a 4-byte offset.
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();  // Padding after the jump — presumably delay-slot filling; confirm for the target ISA revision.
}
7111
7112void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7113 int32_t lower_bound = switch_instr->GetStartValue();
7114 uint32_t num_entries = switch_instr->GetNumEntries();
7115 LocationSummary* locations = switch_instr->GetLocations();
7116 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7117 HBasicBlock* switch_block = switch_instr->GetBlock();
7118 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7119
7120 if (num_entries > kPackedSwitchJumpTableThreshold) {
7121 GenTableBasedPackedSwitch(value_reg,
7122 lower_bound,
7123 num_entries,
7124 switch_block,
7125 default_block);
7126 } else {
7127 GenPackedSwitchWithCompares(value_reg,
7128 lower_bound,
7129 num_entries,
7130 switch_block,
7131 default_block);
7132 }
7133}
7134
// Sets up locations for HClassTableGet: the class object comes in a core
// register and the loaded method pointer goes out in a core register.
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
7141
Chris Larsenc9905a62017-03-13 17:06:18 -07007142void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7143 LocationSummary* locations = instruction->GetLocations();
7144 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
7145 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7146 instruction->GetIndex(), kMips64PointerSize).SizeValue();
7147 __ LoadFromOffset(kLoadDoubleword,
7148 locations->Out().AsRegister<GpuRegister>(),
7149 locations->InAt(0).AsRegister<GpuRegister>(),
7150 method_offset);
7151 } else {
7152 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
7153 instruction->GetIndex(), kMips64PointerSize));
7154 __ LoadFromOffset(kLoadDoubleword,
7155 locations->Out().AsRegister<GpuRegister>(),
7156 locations->InAt(0).AsRegister<GpuRegister>(),
7157 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
7158 __ LoadFromOffset(kLoadDoubleword,
7159 locations->Out().AsRegister<GpuRegister>(),
7160 locations->Out().AsRegister<GpuRegister>(),
7161 method_offset);
7162 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007163}
7164
// HIntermediateAddress is never created for this backend, so reaching this
// visitor indicates a compiler bug.
void LocationsBuilderMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7169
// HIntermediateAddress is never created for this backend, so reaching this
// visitor indicates a compiler bug.
void InstructionCodeGeneratorMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7174
Alexey Frunze4dda3372015-06-01 18:31:49 -07007175} // namespace mips64
7176} // namespace art