/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "code_generator_utils.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

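// Maps an HIR `IfCondition` to the matching VIXL/ARM64 condition code.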
inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

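// In the code below, `__` expands to the VIXL macro assembler owned by the `codegen` in scope,
// and QUICK_ENTRY_POINT(x) to the offset of the quick runtime entry point `x` within the Thread
// object (runtime calls are dispatched through the thread register, see
// CodeGeneratorARM64::InvokeRuntime).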
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the spill base address for the floating-point registers).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

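// Slow path for HBoundsCheck: throws ArrayIndexOutOfBoundsException via pThrowArrayBounds,
// passing the index and length as arguments.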
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                           Location index_location,
                           Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}


  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        length_location_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

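// Slow path for HDivZeroCheck: calls pThrowDivZero to throw on integer division by zero.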
class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

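// Slow path for HLoadClass/HClinitCheck: resolves the type (and optionally initializes its
// static storage) through the runtime, then moves the result to the expected location.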
class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

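// Slow path for HLoadString: resolves the string through pResolveString and moves the result
// to the expected location.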
class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

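// Slow path for HNullCheck: calls pThrowNullPointer to throw a NullPointerException.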
class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

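// Slow path for HSuspendCheck: calls pTestSuspend so the thread can honor a pending suspend
// request, then branches back to the instruction stream (or to `successor_` if provided).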
class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

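// Slow path shared by HInstanceOf and HCheckCast: calls pInstanceofNonTrivial or pCheckCast
// with the two classes as arguments.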
class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction,
                         Location class_to_check,
                         Location object_class,
                         uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class_, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_, this);
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

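// Slow path for HDeoptimize: calls the pDeoptimize runtime entry point.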
class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HInstruction* instruction)
      : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    DCHECK(instruction_->IsDeoptimize());
    HDeoptimize* deoptimize = instruction_->AsDeoptimize();
    uint32_t dex_pc = deoptimize->GetDexPc();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize), instruction_, dex_pc, this);
  }

 private:
  HInstruction* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      isa_features_(isa_features) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#undef __
#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  // Ensure we emit the literal pool.
  __ FinalizeCode();
  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to
  // resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  codegen_->MoveLocation(move->GetDestination(), move->GetSource());
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
                          core_spill_mask_);
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
                          fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();
  DCHECK_NE(type, Primitive::kPrimVoid);

  if (instruction->IsIntConstant()
      || instruction->IsLongConstant()
      || instruction->IsNullConstant()) {
    int64_t value = GetInt64ValueOf(instruction->AsConstant());
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK(((instruction->IsIntConstant() || instruction->IsNullConstant()) && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = (instruction->IsIntConstant() || instruction->IsNullConstant())
          ? temps.AcquireW()
          : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveLocation(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    if (Primitive::Is64BitType(type)) {
      MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
    } else {
      MoveLocation(location, Location::StackSlot(stack_slot), type);
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveLocation(location, locations->Out(), type);
  }
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();

  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << type;
  }

  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

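// Write barrier: marks the card covering `object` in the card table when a non-null reference
// `value` has been stored into it.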
void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  __ Bind(&done);
}

void CodeGeneratorARM64::SetupBlockedRegisters(bool is_baseline) const {
  // Blocked core registers:
  //      lr        : Runtime reserved.
  //      tr        : Runtime reserved.
  //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //      ip1       : VIXL core temp.
  //      ip0       : VIXL core temp.
  //
  // Blocked fp registers:
  //      d31       : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (is_baseline) {
    CPURegList reserved_core_baseline_registers = callee_saved_core_registers;
    while (!reserved_core_baseline_registers.IsEmpty()) {
      blocked_core_registers_[reserved_core_baseline_registers.PopLowestIndex().code()] = true;
    }

    CPURegList reserved_fp_baseline_registers = callee_saved_fp_registers;
    while (!reserved_fp_baseline_registers.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_baseline_registers.PopLowestIndex().code()] = true;
    }
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type)) {
    ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
    DCHECK_NE(reg, -1);
    return Location::FpuRegisterLocation(reg);
  } else {
    ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
    DCHECK_NE(reg, -1);
    return Location::RegisterLocation(reg);
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Otherwise the source is a register, and since
        // the type has not been specified, we choose a 64bit type to force a
        // 64bit move.
        type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(type)));
    CPURegister dst = CPURegisterFrom(destination, type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, type));
      MoveConstant(dst, source.GetConstant());
    } else {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(type)));
      __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, type));
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      MaybeRecordImplicitNullCheck(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      MaybeRecordImplicitNullCheck(instruction);
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

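// Like Store(), but emits a store-release (stlrb/stlrh/stlr); the address is materialized into
// a temporary base register first, as the release instructions only accept a plain base register.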
void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
  DCHECK(RequiresCurrentMethod());
  DCHECK(current_method.IsW());
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
  DCHECK(instruction->IsSuspendCheck()
         || instruction->IsBoundsCheck()
         || instruction->IsNullCheck()
         || instruction->IsDivZeroCheck()
         || !IsLeafMethod());
}

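// Loads the class status and jumps to `slow_path` if the class is not yet initialized; uses
// either a load-acquire or an explicit memory barrier to order subsequent accesses.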
1087void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
1088 vixl::Register class_reg) {
1089 UseScratchRegisterScope temps(GetVIXLAssembler());
1090 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001091 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
Serban Constantinescu579885a2015-02-22 20:51:33 +00001092 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001093
Serban Constantinescu02164b32014-11-13 14:05:07 +00001094 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu579885a2015-02-22 20:51:33 +00001095 if (use_acquire_release) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001096 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1097 __ Add(temp, class_reg, status_offset);
1098 __ Ldar(temp, HeapOperand(temp));
1099 __ Cmp(temp, mirror::Class::kStatusInitialized);
1100 __ B(lt, slow_path->GetEntryLabel());
1101 } else {
1102 __ Ldr(temp, HeapOperand(class_reg, status_offset));
1103 __ Cmp(temp, mirror::Class::kStatusInitialized);
1104 __ B(lt, slow_path->GetEntryLabel());
1105 __ Dmb(InnerShareable, BarrierReads);
1106 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001107 __ Bind(slow_path->GetExitLabel());
1108}
Alexandre Rames5319def2014-10-23 10:03:10 +01001109
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001110void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
1111 BarrierType type = BarrierAll;
1112
1113 switch (kind) {
1114 case MemBarrierKind::kAnyAny:
1115 case MemBarrierKind::kAnyStore: {
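      // kAnyStore must order earlier loads as well as earlier stores against
      // later stores; DMB ISHST only orders store->store, so a full barrier
      // is used here too.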
1116 type = BarrierAll;
1117 break;
1118 }
1119 case MemBarrierKind::kLoadAny: {
1120 type = BarrierReads;
1121 break;
1122 }
1123 case MemBarrierKind::kStoreStore: {
1124 type = BarrierWrites;
1125 break;
1126 }
1127 default:
1128 LOG(FATAL) << "Unexpected memory barrier " << kind;
1129 }
1130 __ Dmb(InnerShareable, type);
1131}
1132
Serban Constantinescu02164b32014-11-13 14:05:07 +00001133void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1134 HBasicBlock* successor) {
1135 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001136 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1137 if (slow_path == nullptr) {
1138 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1139 instruction->SetSlowPath(slow_path);
1140 codegen_->AddSlowPath(slow_path);
1141 if (successor != nullptr) {
1142 DCHECK(successor->IsLoopHeader());
1143 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1144 }
1145 } else {
1146 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1147 }
1148
Serban Constantinescu02164b32014-11-13 14:05:07 +00001149 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1150 Register temp = temps.AcquireW();
1151
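  // The 16-bit thread flags are zero in the common case; any non-zero value
  // (e.g. a pending suspend or checkpoint request) diverts to the slow path.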
1152 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
1153 if (successor == nullptr) {
1154 __ Cbnz(temp, slow_path->GetEntryLabel());
1155 __ Bind(slow_path->GetReturnLabel());
1156 } else {
1157 __ Cbz(temp, codegen_->GetLabelOf(successor));
1158 __ B(slow_path->GetEntryLabel());
1159 // slow_path will return to GetLabelOf(successor).
1160 }
1161}
1162
Alexandre Rames5319def2014-10-23 10:03:10 +01001163InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1164 CodeGeneratorARM64* codegen)
1165 : HGraphVisitor(graph),
1166 assembler_(codegen->GetAssembler()),
1167 codegen_(codegen) {}
1168
1169#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001170 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001171
1172#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1173
1174enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001175 // Using a base helps identify when we hit such breakpoints.
1176 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001177#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1178 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1179#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1180};
1181
1182#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
1183 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001184 UNUSED(instr); \
Alexandre Rames5319def2014-10-23 10:03:10 +01001185 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1186 } \
1187 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1188 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1189 locations->SetOut(Location::Any()); \
1190 }
1191 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1192#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1193
1194#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001195#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001196
Alexandre Rames67555f72014-11-18 10:55:16 +00001197void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001198 DCHECK_EQ(instr->InputCount(), 2U);
1199 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1200 Primitive::Type type = instr->GetResultType();
1201 switch (type) {
1202 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001203 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001204 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001205 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001206 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001207 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001208
1209 case Primitive::kPrimFloat:
1210 case Primitive::kPrimDouble:
1211 locations->SetInAt(0, Location::RequiresFpuRegister());
1212 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001213 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001214 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001215
Alexandre Rames5319def2014-10-23 10:03:10 +01001216 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001217 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001218 }
1219}
1220
Alexandre Rames09a99962015-04-15 11:47:56 +01001221void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
1222 LocationSummary* locations =
1223 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1224 locations->SetInAt(0, Location::RequiresRegister());
1225 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1226 locations->SetOut(Location::RequiresFpuRegister());
1227 } else {
1228 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1229 }
1230}
1231
1232void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1233 const FieldInfo& field_info) {
1234 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001235 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001236
1237 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
1238 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
1239
1240 if (field_info.IsVolatile()) {
1241 if (use_acquire_release) {
1242 // NB: LoadAcquire will record the pc info if needed.
1243 codegen_->LoadAcquire(instruction, OutputCPURegister(instruction), field);
1244 } else {
1245 codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
1246 codegen_->MaybeRecordImplicitNullCheck(instruction);
1247 // For IRIW sequential consistency kLoadAny is not sufficient.
1248 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1249 }
1250 } else {
1251 codegen_->Load(field_info.GetFieldType(), OutputCPURegister(instruction), field);
1252 codegen_->MaybeRecordImplicitNullCheck(instruction);
1253 }
1254}
1255
1256void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1257 LocationSummary* locations =
1258 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1259 locations->SetInAt(0, Location::RequiresRegister());
1260 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1261 locations->SetInAt(1, Location::RequiresFpuRegister());
1262 } else {
1263 locations->SetInAt(1, Location::RequiresRegister());
1264 }
1265}
1266
1267void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
1268 const FieldInfo& field_info) {
1269 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001270 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001271
1272 Register obj = InputRegisterAt(instruction, 0);
1273 CPURegister value = InputCPURegisterAt(instruction, 1);
1274 Offset offset = field_info.GetFieldOffset();
1275 Primitive::Type field_type = field_info.GetFieldType();
1276 bool use_acquire_release = codegen_->GetInstructionSetFeatures().PreferAcquireRelease();
1277
1278 if (field_info.IsVolatile()) {
1279 if (use_acquire_release) {
1280 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1281 codegen_->MaybeRecordImplicitNullCheck(instruction);
1282 } else {
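      // Without store-release, the volatile store is bracketed by barriers:
      // one before it so it cannot be reordered with earlier accesses, and a
      // full barrier after it to order it with subsequent volatile loads.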
1283 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1284 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1285 codegen_->MaybeRecordImplicitNullCheck(instruction);
1286 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1287 }
1288 } else {
1289 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1290 codegen_->MaybeRecordImplicitNullCheck(instruction);
1291 }
1292
1293 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
1294 codegen_->MarkGCCard(obj, Register(value));
1295 }
1296}
1297
Alexandre Rames67555f72014-11-18 10:55:16 +00001298void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001299 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001300
1301 switch (type) {
1302 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001303 case Primitive::kPrimLong: {
1304 Register dst = OutputRegister(instr);
1305 Register lhs = InputRegisterAt(instr, 0);
1306 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001307 if (instr->IsAdd()) {
1308 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001309 } else if (instr->IsAnd()) {
1310 __ And(dst, lhs, rhs);
1311 } else if (instr->IsOr()) {
1312 __ Orr(dst, lhs, rhs);
1313 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001314 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001315 } else {
1316 DCHECK(instr->IsXor());
1317 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001318 }
1319 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001320 }
1321 case Primitive::kPrimFloat:
1322 case Primitive::kPrimDouble: {
1323 FPRegister dst = OutputFPRegister(instr);
1324 FPRegister lhs = InputFPRegisterAt(instr, 0);
1325 FPRegister rhs = InputFPRegisterAt(instr, 1);
1326 if (instr->IsAdd()) {
1327 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001328 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001329 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001330 } else {
1331 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001332 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001333 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001334 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001335 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001336 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001337 }
1338}
1339
Serban Constantinescu02164b32014-11-13 14:05:07 +00001340void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1341 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1342
1343 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1344 Primitive::Type type = instr->GetResultType();
1345 switch (type) {
1346 case Primitive::kPrimInt:
1347 case Primitive::kPrimLong: {
1348 locations->SetInAt(0, Location::RequiresRegister());
1349 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1350 locations->SetOut(Location::RequiresRegister());
1351 break;
1352 }
1353 default:
1354 LOG(FATAL) << "Unexpected shift type " << type;
1355 }
1356}
1357
1358void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1359 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1360
1361 Primitive::Type type = instr->GetType();
1362 switch (type) {
1363 case Primitive::kPrimInt:
1364 case Primitive::kPrimLong: {
1365 Register dst = OutputRegister(instr);
1366 Register lhs = InputRegisterAt(instr, 0);
1367 Operand rhs = InputOperandAt(instr, 1);
1368 if (rhs.IsImmediate()) {
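        // Java only uses the low 5 (int) or 6 (long) bits of the shift amount.
        // The register forms of Lsl/Asr/Lsr below take the amount modulo the
        // register size, so explicit masking is only needed for immediates.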
1369 uint32_t shift_value = (type == Primitive::kPrimInt)
1370 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1371 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1372 if (instr->IsShl()) {
1373 __ Lsl(dst, lhs, shift_value);
1374 } else if (instr->IsShr()) {
1375 __ Asr(dst, lhs, shift_value);
1376 } else {
1377 __ Lsr(dst, lhs, shift_value);
1378 }
1379 } else {
1380 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1381
1382 if (instr->IsShl()) {
1383 __ Lsl(dst, lhs, rhs_reg);
1384 } else if (instr->IsShr()) {
1385 __ Asr(dst, lhs, rhs_reg);
1386 } else {
1387 __ Lsr(dst, lhs, rhs_reg);
1388 }
1389 }
1390 break;
1391 }
1392 default:
1393 LOG(FATAL) << "Unexpected shift operation type " << type;
1394 }
1395}
1396
Alexandre Rames5319def2014-10-23 10:03:10 +01001397void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001398 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001399}
1400
1401void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001402 HandleBinaryOp(instruction);
1403}
1404
1405void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1406 HandleBinaryOp(instruction);
1407}
1408
1409void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1410 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001411}
1412
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001413void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1414 LocationSummary* locations =
1415 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1416 locations->SetInAt(0, Location::RequiresRegister());
1417 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001418 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1419 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1420 } else {
1421 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1422 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001423}
1424
1425void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1426 LocationSummary* locations = instruction->GetLocations();
1427 Primitive::Type type = instruction->GetType();
1428 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001429 Location index = locations->InAt(1);
1430 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001431 MemOperand source = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001432 MacroAssembler* masm = GetVIXLAssembler();
1433 UseScratchRegisterScope temps(masm);
1434 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001435
1436 if (index.IsConstant()) {
1437 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001438 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001439 } else {
1440 Register temp = temps.AcquireSameSizeAs(obj);
1441 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1442 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001443 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001444 }
1445
Alexandre Rames67555f72014-11-18 10:55:16 +00001446 codegen_->Load(type, OutputCPURegister(instruction), source);
Calin Juravle77520bc2015-01-12 18:45:46 +00001447 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001448}
1449
Alexandre Rames5319def2014-10-23 10:03:10 +01001450void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1451 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1452 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001453 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001454}
1455
1456void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001457 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001458 __ Ldr(OutputRegister(instruction),
1459 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
Calin Juravle77520bc2015-01-12 18:45:46 +00001460 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001461}
1462
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001463void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Alexandre Rames97833a02015-04-16 15:07:12 +01001464 if (instruction->NeedsTypeCheck()) {
1465 LocationSummary* locations =
1466 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001467 InvokeRuntimeCallingConvention calling_convention;
1468 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1469 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1470 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1471 } else {
Alexandre Rames97833a02015-04-16 15:07:12 +01001472 LocationSummary* locations =
1473 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001474 locations->SetInAt(0, Location::RequiresRegister());
1475 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01001476 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1477 locations->SetInAt(2, Location::RequiresFpuRegister());
1478 } else {
1479 locations->SetInAt(2, Location::RequiresRegister());
1480 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001481 }
1482}
1483
1484void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1485 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01001486 LocationSummary* locations = instruction->GetLocations();
1487 bool needs_runtime_call = locations->WillCall();
1488
1489 if (needs_runtime_call) {
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001490 codegen_->InvokeRuntime(
1491 QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001492 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001493 } else {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001494 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001495 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001496 Location index = locations->InAt(1);
1497 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001498 MemOperand destination = HeapOperand(obj);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001499 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001500 BlockPoolsScope block_pools(masm);
Alexandre Rames97833a02015-04-16 15:07:12 +01001501 {
1502 // We use a block to end the scratch scope before the write barrier, thus
1503 // freeing the temporary registers so they can be used in `MarkGCCard`.
1504 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001505
Alexandre Rames97833a02015-04-16 15:07:12 +01001506 if (index.IsConstant()) {
1507 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
1508 destination = HeapOperand(obj, offset);
1509 } else {
1510 Register temp = temps.AcquireSameSizeAs(obj);
1511 Register index_reg = InputRegisterAt(instruction, 1);
1512 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
1513 destination = HeapOperand(temp, offset);
1514 }
1515
1516 codegen_->Store(value_type, value, destination);
1517 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001518 }
Alexandre Rames97833a02015-04-16 15:07:12 +01001519 if (CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue())) {
1520 codegen_->MarkGCCard(obj, value.W());
1521 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001522 }
1523}
1524
Alexandre Rames67555f72014-11-18 10:55:16 +00001525void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1526 LocationSummary* locations =
1527 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1528 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00001529 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00001530 if (instruction->HasUses()) {
1531 locations->SetOut(Location::SameAsFirstInput());
1532 }
1533}
1534
1535void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001536 LocationSummary* locations = instruction->GetLocations();
1537 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1538 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001539 codegen_->AddSlowPath(slow_path);
1540
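  // An unsigned comparison is used so that a negative index, reinterpreted as
  // a large unsigned value, also takes the slow path (hs is unsigned >=).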
1541 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1542 __ B(slow_path->GetEntryLabel(), hs);
1543}
1544
1545void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1546 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1547 instruction, LocationSummary::kCallOnSlowPath);
1548 locations->SetInAt(0, Location::RequiresRegister());
1549 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001550 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001551}
1552
1553void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001554 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001555  Register obj = InputRegisterAt(instruction, 0);
 1556  Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001557 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001558
Alexandre Rames3e69f162014-12-10 10:36:50 +00001559 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1560 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001561 codegen_->AddSlowPath(slow_path);
1562
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01001563 // Avoid null check if we know obj is not null.
1564 if (instruction->MustDoNullCheck()) {
1565 __ Cbz(obj, slow_path->GetExitLabel());
1566 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001567 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001568 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1569 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001570 __ B(ne, slow_path->GetEntryLabel());
1571 __ Bind(slow_path->GetExitLabel());
1572}
1573
1574void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1575 LocationSummary* locations =
1576 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1577 locations->SetInAt(0, Location::RequiresRegister());
1578 if (check->HasUses()) {
1579 locations->SetOut(Location::SameAsFirstInput());
1580 }
1581}
1582
1583void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1584 // We assume the class is not null.
1585 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1586 check->GetLoadClass(), check, check->GetDexPc(), true);
1587 codegen_->AddSlowPath(slow_path);
1588 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1589}
1590
Serban Constantinescu02164b32014-11-13 14:05:07 +00001591void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001592 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001593 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1594 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001595 switch (in_type) {
1596 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001597 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001598 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001599 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1600 break;
1601 }
1602 case Primitive::kPrimFloat:
1603 case Primitive::kPrimDouble: {
1604 locations->SetInAt(0, Location::RequiresFpuRegister());
Alexandre Rames93415462015-02-17 15:08:20 +00001605 HInstruction* right = compare->InputAt(1);
1606 if ((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1607 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0))) {
1608 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1609 } else {
1610 locations->SetInAt(1, Location::RequiresFpuRegister());
1611 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001612 locations->SetOut(Location::RequiresRegister());
1613 break;
1614 }
1615 default:
1616 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1617 }
1618}
1619
1620void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1621 Primitive::Type in_type = compare->InputAt(0)->GetType();
1622
1623 // 0 if: left == right
1624 // 1 if: left > right
1625 // -1 if: left < right
1626 switch (in_type) {
1627 case Primitive::kPrimLong: {
1628 Register result = OutputRegister(compare);
1629 Register left = InputRegisterAt(compare, 0);
1630 Operand right = InputOperandAt(compare, 1);
1631
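      // Branchless sequence: Cset yields 1 if left != right and 0 otherwise;
      // Cneg then negates that when left < right, giving -1, 0 or 1.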
1632 __ Cmp(left, right);
1633 __ Cset(result, ne);
1634 __ Cneg(result, result, lt);
1635 break;
1636 }
1637 case Primitive::kPrimFloat:
1638 case Primitive::kPrimDouble: {
1639 Register result = OutputRegister(compare);
1640 FPRegister left = InputFPRegisterAt(compare, 0);
Alexandre Rames93415462015-02-17 15:08:20 +00001641 if (compare->GetLocations()->InAt(1).IsConstant()) {
1642 if (kIsDebugBuild) {
1643 HInstruction* right = compare->GetLocations()->InAt(1).GetConstant();
1644 DCHECK((right->IsFloatConstant() && (right->AsFloatConstant()->GetValue() == 0.0f)) ||
1645 (right->IsDoubleConstant() && (right->AsDoubleConstant()->GetValue() == 0.0)));
1646 }
 1647        // 0.0 is the only immediate that can be encoded directly in an FCMP instruction.
1648 __ Fcmp(left, 0.0);
1649 } else {
1650 __ Fcmp(left, InputFPRegisterAt(compare, 1));
1651 }
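      // The bias determines the result of an unordered (NaN) comparison: with
      // a gt bias the Cset/Cneg pair below produces 1, with an lt bias the
      // Csetm/Cneg pair produces -1.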
Serban Constantinescu02164b32014-11-13 14:05:07 +00001652 if (compare->IsGtBias()) {
1653 __ Cset(result, ne);
1654 } else {
1655 __ Csetm(result, ne);
1656 }
1657 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001658 break;
1659 }
1660 default:
1661 LOG(FATAL) << "Unimplemented compare type " << in_type;
1662 }
1663}
1664
1665void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1666 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1667 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001668 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames5319def2014-10-23 10:03:10 +01001669 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001670 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001671 }
1672}
1673
1674void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1675 if (!instruction->NeedsMaterialization()) {
1676 return;
1677 }
1678
1679 LocationSummary* locations = instruction->GetLocations();
1680 Register lhs = InputRegisterAt(instruction, 0);
1681 Operand rhs = InputOperandAt(instruction, 1);
1682 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1683 Condition cond = ARM64Condition(instruction->GetCondition());
1684
1685 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001686 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001687}
1688
1689#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1690 M(Equal) \
1691 M(NotEqual) \
1692 M(LessThan) \
1693 M(LessThanOrEqual) \
1694 M(GreaterThan) \
1695 M(GreaterThanOrEqual)
1696#define DEFINE_CONDITION_VISITORS(Name) \
1697void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1698void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1699FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001700#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001701#undef FOR_EACH_CONDITION_INSTRUCTION
1702
Zheng Xuc6667102015-05-15 16:08:45 +08001703void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
1704 DCHECK(instruction->IsDiv() || instruction->IsRem());
1705
1706 LocationSummary* locations = instruction->GetLocations();
1707 Location second = locations->InAt(1);
1708 DCHECK(second.IsConstant());
1709
1710 Register out = OutputRegister(instruction);
1711 Register dividend = InputRegisterAt(instruction, 0);
1712 int64_t imm = Int64FromConstant(second.GetConstant());
1713 DCHECK(imm == 1 || imm == -1);
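  // x % 1 and x % -1 are always 0, while x / 1 is x and x / -1 is -x, so no
  // division instruction is needed.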
1714
1715 if (instruction->IsRem()) {
1716 __ Mov(out, 0);
1717 } else {
1718 if (imm == 1) {
1719 __ Mov(out, dividend);
1720 } else {
1721 __ Neg(out, dividend);
1722 }
1723 }
1724}
1725
1726void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
1727 DCHECK(instruction->IsDiv() || instruction->IsRem());
1728
1729 LocationSummary* locations = instruction->GetLocations();
1730 Location second = locations->InAt(1);
1731 DCHECK(second.IsConstant());
1732
1733 Register out = OutputRegister(instruction);
1734 Register dividend = InputRegisterAt(instruction, 0);
1735 int64_t imm = Int64FromConstant(second.GetConstant());
1736 int64_t abs_imm = std::abs(imm);
1737 DCHECK(IsPowerOfTwo(abs_imm));
1738 int ctz_imm = CTZ(abs_imm);
1739
1740 UseScratchRegisterScope temps(GetVIXLAssembler());
1741 Register temp = temps.AcquireSameSizeAs(out);
1742
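  // Division rounds toward zero, so a negative dividend is biased by
  // (abs_imm - 1) before the arithmetic shift; the remainder applies the same
  // bias, rebuilt from the sign bits. For example, with imm == 4 and
  // dividend == -7: div computes (-7 + 3) >> 2 == -1, and rem computes
  // ((-7 + 3) & 3) - 3 == -3.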
1743 if (instruction->IsDiv()) {
1744 __ Add(temp, dividend, abs_imm - 1);
1745 __ Cmp(dividend, 0);
1746 __ Csel(out, temp, dividend, lt);
1747 if (imm > 0) {
1748 __ Asr(out, out, ctz_imm);
1749 } else {
1750 __ Neg(out, Operand(out, ASR, ctz_imm));
1751 }
1752 } else {
1753 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
1754 __ Asr(temp, dividend, bits - 1);
1755 __ Lsr(temp, temp, bits - ctz_imm);
1756 __ Add(out, dividend, temp);
1757 __ And(out, out, abs_imm - 1);
1758 __ Sub(out, out, temp);
1759 }
1760}
1761
1762void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
1763 DCHECK(instruction->IsDiv() || instruction->IsRem());
1764
1765 LocationSummary* locations = instruction->GetLocations();
1766 Location second = locations->InAt(1);
1767 DCHECK(second.IsConstant());
1768
1769 Register out = OutputRegister(instruction);
1770 Register dividend = InputRegisterAt(instruction, 0);
1771 int64_t imm = Int64FromConstant(second.GetConstant());
1772
1773 Primitive::Type type = instruction->GetResultType();
1774 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1775
1776 int64_t magic;
1777 int shift;
1778 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
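  // Standard magic-number division (see Hacker's Delight): take the high half
  // of dividend * magic, correct it by +/- dividend when the sign of the magic
  // constant differs from the sign of the divisor, shift right by `shift`, and
  // finally subtract the shifted-in sign (i.e. add 1 for negative results) so
  // the quotient rounds toward zero.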
1779
1780 UseScratchRegisterScope temps(GetVIXLAssembler());
1781 Register temp = temps.AcquireSameSizeAs(out);
1782
1783 // temp = get_high(dividend * magic)
1784 __ Mov(temp, magic);
1785 if (type == Primitive::kPrimLong) {
1786 __ Smulh(temp, dividend, temp);
1787 } else {
1788 __ Smull(temp.X(), dividend, temp);
1789 __ Lsr(temp.X(), temp.X(), 32);
1790 }
1791
1792 if (imm > 0 && magic < 0) {
1793 __ Add(temp, temp, dividend);
1794 } else if (imm < 0 && magic > 0) {
1795 __ Sub(temp, temp, dividend);
1796 }
1797
1798 if (shift != 0) {
1799 __ Asr(temp, temp, shift);
1800 }
1801
1802 if (instruction->IsDiv()) {
1803 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
1804 } else {
1805 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
1806 // TODO: Strength reduction for msub.
1807 Register temp_imm = temps.AcquireSameSizeAs(out);
1808 __ Mov(temp_imm, imm);
1809 __ Msub(out, temp, temp_imm, dividend);
1810 }
1811}
1812
1813void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
1814 DCHECK(instruction->IsDiv() || instruction->IsRem());
1815 Primitive::Type type = instruction->GetResultType();
 1816  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1817
1818 LocationSummary* locations = instruction->GetLocations();
1819 Register out = OutputRegister(instruction);
1820 Location second = locations->InAt(1);
1821
1822 if (second.IsConstant()) {
1823 int64_t imm = Int64FromConstant(second.GetConstant());
1824
1825 if (imm == 0) {
 1826      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
1827 } else if (imm == 1 || imm == -1) {
1828 DivRemOneOrMinusOne(instruction);
1829 } else if (IsPowerOfTwo(std::abs(imm))) {
1830 DivRemByPowerOfTwo(instruction);
1831 } else {
1832 DCHECK(imm <= -2 || imm >= 2);
1833 GenerateDivRemWithAnyConstant(instruction);
1834 }
1835 } else {
1836 Register dividend = InputRegisterAt(instruction, 0);
1837 Register divisor = InputRegisterAt(instruction, 1);
1838 if (instruction->IsDiv()) {
1839 __ Sdiv(out, dividend, divisor);
1840 } else {
1841 UseScratchRegisterScope temps(GetVIXLAssembler());
1842 Register temp = temps.AcquireSameSizeAs(out);
1843 __ Sdiv(temp, dividend, divisor);
1844 __ Msub(out, temp, divisor, dividend);
1845 }
1846 }
1847}
1848
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001849void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1850 LocationSummary* locations =
1851 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1852 switch (div->GetResultType()) {
1853 case Primitive::kPrimInt:
1854 case Primitive::kPrimLong:
1855 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08001856 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001857 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1858 break;
1859
1860 case Primitive::kPrimFloat:
1861 case Primitive::kPrimDouble:
1862 locations->SetInAt(0, Location::RequiresFpuRegister());
1863 locations->SetInAt(1, Location::RequiresFpuRegister());
1864 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1865 break;
1866
1867 default:
1868 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1869 }
1870}
1871
1872void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1873 Primitive::Type type = div->GetResultType();
1874 switch (type) {
1875 case Primitive::kPrimInt:
1876 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08001877 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001878 break;
1879
1880 case Primitive::kPrimFloat:
1881 case Primitive::kPrimDouble:
1882 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1883 break;
1884
1885 default:
1886 LOG(FATAL) << "Unexpected div type " << type;
1887 }
1888}
1889
Alexandre Rames67555f72014-11-18 10:55:16 +00001890void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1891 LocationSummary* locations =
1892 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1893 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1894 if (instruction->HasUses()) {
1895 locations->SetOut(Location::SameAsFirstInput());
1896 }
1897}
1898
1899void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1900 SlowPathCodeARM64* slow_path =
1901 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1902 codegen_->AddSlowPath(slow_path);
1903 Location value = instruction->GetLocations()->InAt(0);
1904
Alexandre Rames3e69f162014-12-10 10:36:50 +00001905 Primitive::Type type = instruction->GetType();
1906
1907 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
 1908    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1909 return;
1910 }
1911
Alexandre Rames67555f72014-11-18 10:55:16 +00001912 if (value.IsConstant()) {
1913 int64_t divisor = Int64ConstantFrom(value);
1914 if (divisor == 0) {
1915 __ B(slow_path->GetEntryLabel());
1916 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001917      // A division by a non-zero constant is valid. We don't need to perform
 1918      // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001919 }
1920 } else {
1921 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1922 }
1923}
1924
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001925void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1926 LocationSummary* locations =
1927 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1928 locations->SetOut(Location::ConstantLocation(constant));
1929}
1930
1931void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1932 UNUSED(constant);
1933 // Will be generated at use site.
1934}
1935
Alexandre Rames5319def2014-10-23 10:03:10 +01001936void LocationsBuilderARM64::VisitExit(HExit* exit) {
1937 exit->SetLocations(nullptr);
1938}
1939
1940void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001941 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001942}
1943
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001944void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1945 LocationSummary* locations =
1946 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1947 locations->SetOut(Location::ConstantLocation(constant));
1948}
1949
1950void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1951 UNUSED(constant);
1952 // Will be generated at use site.
1953}
1954
Alexandre Rames5319def2014-10-23 10:03:10 +01001955void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1956 got->SetLocations(nullptr);
1957}
1958
1959void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1960 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001961 DCHECK(!successor->IsExitBlock());
1962 HBasicBlock* block = got->GetBlock();
1963 HInstruction* previous = got->GetPrevious();
1964 HLoopInformation* info = block->GetLoopInformation();
1965
David Brazdil46e2a392015-03-16 17:31:52 +00001966 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001967 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1968 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1969 return;
1970 }
1971 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1972 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1973 }
1974 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001975 __ B(codegen_->GetLabelOf(successor));
1976 }
1977}
1978
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001979void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
1980 vixl::Label* true_target,
1981 vixl::Label* false_target,
1982 vixl::Label* always_true_target) {
1983 HInstruction* cond = instruction->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001984 HCondition* condition = cond->AsCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01001985
Serban Constantinescu02164b32014-11-13 14:05:07 +00001986 if (cond->IsIntConstant()) {
1987 int32_t cond_value = cond->AsIntConstant()->GetValue();
1988 if (cond_value == 1) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001989 if (always_true_target != nullptr) {
1990 __ B(always_true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001991 }
1992 return;
1993 } else {
1994 DCHECK_EQ(cond_value, 0);
1995 }
1996 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001997 // The condition instruction has been materialized, compare the output to 0.
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001998 Location cond_val = instruction->GetLocations()->InAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001999 DCHECK(cond_val.IsRegister());
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002000 __ Cbnz(InputRegisterAt(instruction, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01002001 } else {
2002 // The condition instruction has not been materialized, use its inputs as
2003 // the comparison and its condition as the branch condition.
2004 Register lhs = InputRegisterAt(condition, 0);
2005 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08002006 Condition arm64_cond = ARM64Condition(condition->GetCondition());
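      // Comparisons against zero can use compare-and-branch (Cbz/Cbnz) or
      // test-bit-and-branch on the sign bit (Tbz/Tbnz) and skip the Cmp.
      // gt and le are excluded because "value > 0" and "value <= 0" cannot be
      // decided from the sign bit alone.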
Alexandre Rames4388dcc2015-02-03 10:28:33 +00002007 if ((arm64_cond != gt && arm64_cond != le) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
2008 switch (arm64_cond) {
2009 case eq:
2010 __ Cbz(lhs, true_target);
2011 break;
2012 case ne:
2013 __ Cbnz(lhs, true_target);
2014 break;
2015 case lt:
2016 // Test the sign bit and branch accordingly.
2017 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
2018 break;
2019 case ge:
2020 // Test the sign bit and branch accordingly.
2021 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, true_target);
2022 break;
2023 default:
2024 // Without the `static_cast` the compiler throws an error for
2025 // `-Werror=sign-promo`.
2026 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01002027 }
2028 } else {
2029 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08002030 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01002031 }
2032 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002033 if (false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002034 __ B(false_target);
2035 }
2036}
2037
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002038void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2039 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
2040 HInstruction* cond = if_instr->InputAt(0);
2041 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
2042 locations->SetInAt(0, Location::RequiresRegister());
2043 }
2044}
2045
2046void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
2047 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
2048 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
2049 vixl::Label* always_true_target = true_target;
2050 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
2051 if_instr->IfTrueSuccessor())) {
2052 always_true_target = nullptr;
2053 }
2054 if (codegen_->GoesToNextBlock(if_instr->GetBlock(),
2055 if_instr->IfFalseSuccessor())) {
2056 false_target = nullptr;
2057 }
2058 GenerateTestAndBranch(if_instr, true_target, false_target, always_true_target);
2059}
2060
2061void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2062 LocationSummary* locations = new (GetGraph()->GetArena())
2063 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
2064 HInstruction* cond = deoptimize->InputAt(0);
2065 DCHECK(cond->IsCondition());
2066 if (cond->AsCondition()->NeedsMaterialization()) {
2067 locations->SetInAt(0, Location::RequiresRegister());
2068 }
2069}
2070
2071void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2072 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
2073 DeoptimizationSlowPathARM64(deoptimize);
2074 codegen_->AddSlowPath(slow_path);
2075 vixl::Label* slow_path_entry = slow_path->GetEntryLabel();
2076 GenerateTestAndBranch(deoptimize, slow_path_entry, nullptr, slow_path_entry);
2077}
2078
Alexandre Rames5319def2014-10-23 10:03:10 +01002079void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002080 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002081}
2082
2083void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002084 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01002085}
2086
2087void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002088 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002089}
2090
2091void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002092 HandleFieldSet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01002093}
2094
Alexandre Rames67555f72014-11-18 10:55:16 +00002095void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
2096 LocationSummary::CallKind call_kind =
2097 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
2098 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2099 locations->SetInAt(0, Location::RequiresRegister());
2100 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray6c2dff82015-01-21 14:56:54 +00002101  // The output overlaps the inputs: it may be written before the inputs are last read.
2102 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexandre Rames67555f72014-11-18 10:55:16 +00002103}
2104
2105void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
2106 LocationSummary* locations = instruction->GetLocations();
 2107  Register obj = InputRegisterAt(instruction, 0);
 2108  Register cls = InputRegisterAt(instruction, 1);
2109 Register out = OutputRegister(instruction);
2110
2111 vixl::Label done;
2112
2113 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01002114 // Avoid null check if we know `obj` is not null.
2115 if (instruction->MustDoNullCheck()) {
2116 __ Mov(out, 0);
2117 __ Cbz(obj, &done);
2118 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002119
2120 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00002121 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00002122 __ Cmp(out, cls);
2123 if (instruction->IsClassFinal()) {
2124 // Classes must be equal for the instanceof to succeed.
2125 __ Cset(out, eq);
2126 } else {
2127 // If the classes are not equal, we go into a slow path.
2128 DCHECK(locations->OnlyCallsOnSlowPath());
2129 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00002130 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
2131 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00002132 codegen_->AddSlowPath(slow_path);
2133 __ B(ne, slow_path->GetEntryLabel());
2134 __ Mov(out, 1);
2135 __ Bind(slow_path->GetExitLabel());
2136 }
2137
2138 __ Bind(&done);
2139}
2140
Alexandre Rames5319def2014-10-23 10:03:10 +01002141void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
2142 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2143 locations->SetOut(Location::ConstantLocation(constant));
2144}
2145
2146void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
2147 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002148 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002149}
2150
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002151void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
2152 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2153 locations->SetOut(Location::ConstantLocation(constant));
2154}
2155
2156void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant) {
2157 // Will be generated at use site.
2158 UNUSED(constant);
2159}
2160
Alexandre Rames5319def2014-10-23 10:03:10 +01002161void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
2162 LocationSummary* locations =
2163 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
2164 locations->AddTemp(LocationFrom(x0));
2165
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002166 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Roland Levillain3e3d7332015-04-28 11:00:54 +01002167 for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002168 HInstruction* input = invoke->InputAt(i);
2169 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
2170 }
2171
2172 Primitive::Type return_type = invoke->GetType();
2173 if (return_type != Primitive::kPrimVoid) {
2174 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
2175 }
2176}
2177
Alexandre Rames67555f72014-11-18 10:55:16 +00002178void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
2179 HandleInvoke(invoke);
2180}
2181
2182void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
2183 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
2184 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
2185 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
2186 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
2187 Location receiver = invoke->GetLocations()->InAt(0);
2188 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002189 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00002190
2191 // The register ip1 is required to be used for the hidden argument in
2192 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002193 MacroAssembler* masm = GetVIXLAssembler();
2194 UseScratchRegisterScope scratch_scope(masm);
2195 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00002196 scratch_scope.Exclude(ip1);
2197 __ Mov(ip1, invoke->GetDexMethodIndex());
2198
2199 // temp = object->GetClass();
2200 if (receiver.IsStackSlot()) {
2201 __ Ldr(temp, StackOperandFrom(receiver));
2202 __ Ldr(temp, HeapOperand(temp, class_offset));
2203 } else {
2204 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
2205 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002206 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00002207 // temp = temp->GetImtEntryAt(method_offset);
2208 __ Ldr(temp, HeapOperand(temp, method_offset));
2209 // lr = temp->GetEntryPoint();
2210 __ Ldr(lr, HeapOperand(temp, entry_point));
2211 // lr();
2212 __ Blr(lr);
2213 DCHECK(!codegen_->IsLeafMethod());
2214 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2215}
2216
2217void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002218 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
2219 if (intrinsic.TryDispatch(invoke)) {
2220 return;
2221 }
2222
Alexandre Rames67555f72014-11-18 10:55:16 +00002223 HandleInvoke(invoke);
2224}
2225
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002226void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002227 // When we do not run baseline, explicit clinit checks triggered by static
2228 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2229 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002230
Andreas Gampe878d58c2015-01-15 23:24:00 -08002231 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
2232 if (intrinsic.TryDispatch(invoke)) {
2233 return;
2234 }
2235
Alexandre Rames67555f72014-11-18 10:55:16 +00002236 HandleInvoke(invoke);
2237}
2238
Andreas Gampe878d58c2015-01-15 23:24:00 -08002239static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
2240 if (invoke->GetLocations()->Intrinsified()) {
2241 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
2242 intrinsic.Dispatch(invoke);
2243 return true;
2244 }
2245 return false;
2246}
2247
2248void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Register temp) {
2249 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
2250 DCHECK(temp.Is(kArtMethodRegister));
Alexandre Rames5319def2014-10-23 10:03:10 +01002251 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe878d58c2015-01-15 23:24:00 -08002252 invoke->GetDexMethodIndex() * kHeapRefSize;
Alexandre Rames5319def2014-10-23 10:03:10 +01002253
2254 // TODO: Implement all kinds of calls:
2255 // 1) boot -> boot
2256 // 2) app -> boot
2257 // 3) app -> app
2258 //
2259 // Currently we implement the app -> app logic, which looks up in the resolve cache.
2260
Jeff Hao848f70a2014-01-15 13:49:50 -08002261 if (invoke->IsStringInit()) {
2262 // temp = thread->string_init_entrypoint
2263 __ Ldr(temp, HeapOperand(tr, invoke->GetStringInitOffset()));
2264 // LR = temp->entry_point_from_quick_compiled_code_;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002265 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
2266 kArm64WordSize)));
Jeff Hao848f70a2014-01-15 13:49:50 -08002267 // lr()
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002268 __ Blr(lr);
2269 } else {
Jeff Hao848f70a2014-01-15 13:49:50 -08002270 // temp = method;
2271 LoadCurrentMethod(temp);
2272 if (!invoke->IsRecursive()) {
2273 // temp = temp->dex_cache_resolved_methods_;
2274 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
2275 // temp = temp[index_in_cache];
2276 __ Ldr(temp, HeapOperand(temp, index_in_cache));
2277 // lr = temp->entry_point_from_quick_compiled_code_;
2278 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
2279 kArm64WordSize)));
2280 // lr();
2281 __ Blr(lr);
2282 } else {
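      // Recursive call into the method being compiled: branch straight to its
      // frame entry label instead of reloading it from the dex cache.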
2283 __ Bl(&frame_entry_label_);
2284 }
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002285 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002286
Andreas Gampe878d58c2015-01-15 23:24:00 -08002287 DCHECK(!IsLeafMethod());
2288}
2289
2290void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Roland Levillain3e3d7332015-04-28 11:00:54 +01002291 // When we do not run baseline, explicit clinit checks triggered by static
2292 // invokes must have been pruned by art::PrepareForRegisterAllocation.
2293 DCHECK(codegen_->IsBaseline() || !invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002294
Andreas Gampe878d58c2015-01-15 23:24:00 -08002295 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2296 return;
2297 }
2298
Alexandre Ramesd921d642015-04-16 15:07:16 +01002299 BlockPoolsScope block_pools(GetVIXLAssembler());
Andreas Gampe878d58c2015-01-15 23:24:00 -08002300 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
2301 codegen_->GenerateStaticOrDirectCall(invoke, temp);
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00002302 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01002303}
2304
2305void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08002306 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2307 return;
2308 }
2309
Alexandre Rames5319def2014-10-23 10:03:10 +01002310 LocationSummary* locations = invoke->GetLocations();
2311 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002312 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002313 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
2314 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
2315 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002316 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01002317
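  // Keep VIXL from emitting literal/veneer pools inside the dispatch sequence,
  // so the PCs recorded for the implicit null check and the call stay attached
  // to the corresponding instructions.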
Alexandre Ramesd921d642015-04-16 15:07:16 +01002318 BlockPoolsScope block_pools(GetVIXLAssembler());
2319
Alexandre Rames5319def2014-10-23 10:03:10 +01002320 // temp = object->GetClass();
2321 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002322 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2323 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002324 } else {
2325 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002326 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002327 }
Calin Juravle77520bc2015-01-12 18:45:46 +00002328 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames5319def2014-10-23 10:03:10 +01002329 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002330 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002331 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002332 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002333 // lr();
2334 __ Blr(lr);
2335 DCHECK(!codegen_->IsLeafMethod());
2336 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2337}
2338
Alexandre Rames67555f72014-11-18 10:55:16 +00002339void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2340 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2341 : LocationSummary::kNoCall;
2342 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2343 locations->SetOut(Location::RequiresRegister());
2344}
2345
2346void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2347 Register out = OutputRegister(cls);
2348 if (cls->IsReferrersClass()) {
2349 DCHECK(!cls->CanCallRuntime());
2350 DCHECK(!cls->MustGenerateClinitCheck());
2351 codegen_->LoadCurrentMethod(out);
2352 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2353 } else {
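    // Not the referrer's class: load it from the current method's dex cache of
    // resolved types. A null entry (or a required clinit check) is handled by
    // the slow path created below.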
2354 DCHECK(cls->CanCallRuntime());
2355 codegen_->LoadCurrentMethod(out);
2356 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002357 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002358
2359 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2360 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2361 codegen_->AddSlowPath(slow_path);
2362 __ Cbz(out, slow_path->GetEntryLabel());
2363 if (cls->MustGenerateClinitCheck()) {
2364 GenerateClassInitializationCheck(slow_path, out);
2365 } else {
2366 __ Bind(slow_path->GetExitLabel());
2367 }
2368 }
2369}
2370
2371void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2372 LocationSummary* locations =
2373 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2374 locations->SetOut(Location::RequiresRegister());
2375}
2376
2377void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
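  // Load the pending exception from the current thread, then clear the field
  // by storing the zero register.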
2378 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2379 __ Ldr(OutputRegister(instruction), exception);
2380 __ Str(wzr, exception);
2381}
2382
Alexandre Rames5319def2014-10-23 10:03:10 +01002383void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2384 load->SetLocations(nullptr);
2385}
2386
2387void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2388 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002389 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002390}
2391
Alexandre Rames67555f72014-11-18 10:55:16 +00002392void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2393 LocationSummary* locations =
2394 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2395 locations->SetOut(Location::RequiresRegister());
2396}
2397
2398void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
2399 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
2400 codegen_->AddSlowPath(slow_path);
2401
2402 Register out = OutputRegister(load);
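  // out = current method->declaring class->dex cache strings[string_index].
  // A null result means the string is not resolved yet; the slow path calls
  // into the runtime to resolve it.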
2403 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08002404 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2405 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002406 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002407 __ Cbz(out, slow_path->GetEntryLabel());
2408 __ Bind(slow_path->GetExitLabel());
2409}
2410
Alexandre Rames5319def2014-10-23 10:03:10 +01002411void LocationsBuilderARM64::VisitLocal(HLocal* local) {
2412 local->SetLocations(nullptr);
2413}
2414
2415void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
2416 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
2417}
2418
2419void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2420 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2421 locations->SetOut(Location::ConstantLocation(constant));
2422}
2423
2424void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2425 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002426 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002427}
2428
Alexandre Rames67555f72014-11-18 10:55:16 +00002429void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2430 LocationSummary* locations =
2431 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2432 InvokeRuntimeCallingConvention calling_convention;
2433 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2434}
2435
2436void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
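  // Monitor-enter and monitor-exit are both handled entirely in the runtime;
  // only the entrypoint differs.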
2437 codegen_->InvokeRuntime(instruction->IsEnter()
2438 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2439 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002440 instruction->GetDexPc(),
2441 nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002442 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002443}
2444
Alexandre Rames42d641b2014-10-27 14:00:51 +00002445void LocationsBuilderARM64::VisitMul(HMul* mul) {
2446 LocationSummary* locations =
2447 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2448 switch (mul->GetResultType()) {
2449 case Primitive::kPrimInt:
2450 case Primitive::kPrimLong:
2451 locations->SetInAt(0, Location::RequiresRegister());
2452 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002453 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002454 break;
2455
2456 case Primitive::kPrimFloat:
2457 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002458 locations->SetInAt(0, Location::RequiresFpuRegister());
2459 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002460 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002461 break;
2462
2463 default:
2464 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2465 }
2466}
2467
2468void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2469 switch (mul->GetResultType()) {
2470 case Primitive::kPrimInt:
2471 case Primitive::kPrimLong:
2472 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2473 break;
2474
2475 case Primitive::kPrimFloat:
2476 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002477 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002478 break;
2479
2480 default:
2481 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2482 }
2483}
2484
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002485void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2486 LocationSummary* locations =
2487 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2488 switch (neg->GetResultType()) {
2489 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00002490 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002491 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00002492 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002493 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002494
2495 case Primitive::kPrimFloat:
2496 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002497 locations->SetInAt(0, Location::RequiresFpuRegister());
2498 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002499 break;
2500
2501 default:
2502 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2503 }
2504}
2505
2506void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2507 switch (neg->GetResultType()) {
2508 case Primitive::kPrimInt:
2509 case Primitive::kPrimLong:
2510 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2511 break;
2512
2513 case Primitive::kPrimFloat:
2514 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002515 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002516 break;
2517
2518 default:
2519 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2520 }
2521}
2522
2523void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2524 LocationSummary* locations =
2525 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2526 InvokeRuntimeCallingConvention calling_convention;
2527 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002528 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002529 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002530 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2531 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2532 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002533}
2534
2535void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2536 LocationSummary* locations = instruction->GetLocations();
2537 InvokeRuntimeCallingConvention calling_convention;
2538 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2539 DCHECK(type_index.Is(w0));
2540 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002541 DCHECK(current_method.Is(w2));
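  // w0 holds the type index, w1 the component count (the instruction's input),
  // and w2 the referring method, matching the AllocArrayWithAccessCheck
  // signature checked in the locations builder.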
Alexandre Rames67555f72014-11-18 10:55:16 +00002542 codegen_->LoadCurrentMethod(current_method);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002543 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002544 codegen_->InvokeRuntime(
Nicolas Geoffraycb1b00a2015-01-28 14:50:01 +00002545 GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
2546 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002547 instruction->GetDexPc(),
2548 nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002549 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2550 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002551}
2552
Alexandre Rames5319def2014-10-23 10:03:10 +01002553void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2554 LocationSummary* locations =
2555 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2556 InvokeRuntimeCallingConvention calling_convention;
2557 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2558 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2559 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002560 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002561}
2562
2563void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2564 LocationSummary* locations = instruction->GetLocations();
2565 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2566 DCHECK(type_index.Is(w0));
2567 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2568 DCHECK(current_method.Is(w1));
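  // The allocation entrypoint expects the type index in w0 and the referring
  // method in w1, as set up by the runtime calling convention above.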
Alexandre Rames67555f72014-11-18 10:55:16 +00002569 codegen_->LoadCurrentMethod(current_method);
Alexandre Rames5319def2014-10-23 10:03:10 +01002570 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002571 codegen_->InvokeRuntime(
Nicolas Geoffraycb1b00a2015-01-28 14:50:01 +00002572 GetThreadOffset<kArm64WordSize>(instruction->GetEntrypoint()).Int32Value(),
2573 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002574 instruction->GetDexPc(),
2575 nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002576 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002577}
2578
2579void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2580 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00002581 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002582 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002583}
2584
2585void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00002586 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002587 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002588 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01002589 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002590 break;
2591
2592 default:
2593 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2594 }
2595}
2596
David Brazdil66d126e2015-04-03 16:02:44 +01002597void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
2598 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2599 locations->SetInAt(0, Location::RequiresRegister());
2600 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2601}
2602
2603void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
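  // Booleans are 0 or 1, so flipping the low bit with EOR #1 implements logical not.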
David Brazdil66d126e2015-04-03 16:02:44 +01002604 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
2605}
2606
Alexandre Rames5319def2014-10-23 10:03:10 +01002607void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2608 LocationSummary* locations =
2609 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2610 locations->SetInAt(0, Location::RequiresRegister());
2611 if (instruction->HasUses()) {
2612 locations->SetOut(Location::SameAsFirstInput());
2613 }
2614}
2615
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002616void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
Calin Juravle77520bc2015-01-12 18:45:46 +00002617 if (codegen_->CanMoveNullCheckToUser(instruction)) {
2618 return;
2619 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002620
Alexandre Ramesd921d642015-04-16 15:07:16 +01002621 BlockPoolsScope block_pools(GetVIXLAssembler());
2622 Location obj = instruction->GetLocations()->InAt(0);
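  // Load from the object into wzr (the result is discarded). If the reference
  // is null the access faults, and the runtime's fault handler turns the fault
  // into a NullPointerException at the PC recorded below.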
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002623 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
2624 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2625}
2626
2627void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002628 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2629 codegen_->AddSlowPath(slow_path);
2630
2631 LocationSummary* locations = instruction->GetLocations();
2632 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00002633
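  // If the reference is null, branch to the slow path, which throws
  // NullPointerException.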
2634 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01002635}
2636
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002637void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2638 if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2639 GenerateImplicitNullCheck(instruction);
2640 } else {
2641 GenerateExplicitNullCheck(instruction);
2642 }
2643}
2644
Alexandre Rames67555f72014-11-18 10:55:16 +00002645void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2646 HandleBinaryOp(instruction);
2647}
2648
2649void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2650 HandleBinaryOp(instruction);
2651}
2652
Alexandre Rames3e69f162014-12-10 10:36:50 +00002653void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2654 LOG(FATAL) << "Unreachable";
2655}
2656
2657void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2658 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2659}
2660
Alexandre Rames5319def2014-10-23 10:03:10 +01002661void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2662 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2663 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2664 if (location.IsStackSlot()) {
2665 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2666 } else if (location.IsDoubleStackSlot()) {
2667 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2668 }
2669 locations->SetOut(location);
2670}
2671
2672void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2673 // Nothing to do, the parameter is already at its location.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002674 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002675}
2676
2677void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2678 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2679 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2680 locations->SetInAt(i, Location::Any());
2681 }
2682 locations->SetOut(Location::Any());
2683}
2684
2685void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002686 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002687 LOG(FATAL) << "Unreachable";
2688}
2689
Serban Constantinescu02164b32014-11-13 14:05:07 +00002690void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002691 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00002692 LocationSummary::CallKind call_kind =
2693 Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002694 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2695
2696 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002697 case Primitive::kPrimInt:
2698 case Primitive::kPrimLong:
2699 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002700 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002701 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2702 break;
2703
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002704 case Primitive::kPrimFloat:
2705 case Primitive::kPrimDouble: {
2706 InvokeRuntimeCallingConvention calling_convention;
2707 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
2708 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
2709 locations->SetOut(calling_convention.GetReturnLocation(type));
2710
2711 break;
2712 }
2713
Serban Constantinescu02164b32014-11-13 14:05:07 +00002714 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002715 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00002716 }
2717}
2718
2719void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
2720 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002721
Serban Constantinescu02164b32014-11-13 14:05:07 +00002722 switch (type) {
2723 case Primitive::kPrimInt:
2724 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08002725 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002726 break;
2727 }
2728
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002729 case Primitive::kPrimFloat:
2730 case Primitive::kPrimDouble: {
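      // There is no ARM64 instruction for floating-point remainder, so call the
      // runtime's fmodf/fmod.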
2731 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
2732 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002733 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002734 break;
2735 }
2736
Serban Constantinescu02164b32014-11-13 14:05:07 +00002737 default:
2738 LOG(FATAL) << "Unexpected rem type " << type;
2739 }
2740}
2741
Calin Juravle27df7582015-04-17 19:12:31 +01002742void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2743 memory_barrier->SetLocations(nullptr);
2744}
2745
2746void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2747 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
2748}
2749
Alexandre Rames5319def2014-10-23 10:03:10 +01002750void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
2751 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2752 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002753 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01002754}
2755
2756void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002757 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002758 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01002759}
2760
2761void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
2762 instruction->SetLocations(nullptr);
2763}
2764
2765void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002766 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002767 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01002768}
2769
Serban Constantinescu02164b32014-11-13 14:05:07 +00002770void LocationsBuilderARM64::VisitShl(HShl* shl) {
2771 HandleShift(shl);
2772}
2773
2774void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
2775 HandleShift(shl);
2776}
2777
2778void LocationsBuilderARM64::VisitShr(HShr* shr) {
2779 HandleShift(shr);
2780}
2781
2782void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
2783 HandleShift(shr);
2784}
2785
Alexandre Rames5319def2014-10-23 10:03:10 +01002786void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
2787 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
2788 Primitive::Type field_type = store->InputAt(1)->GetType();
2789 switch (field_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002790 case Primitive::kPrimNot:
Alexandre Rames5319def2014-10-23 10:03:10 +01002791 case Primitive::kPrimBoolean:
2792 case Primitive::kPrimByte:
2793 case Primitive::kPrimChar:
2794 case Primitive::kPrimShort:
2795 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002796 case Primitive::kPrimFloat:
Alexandre Rames5319def2014-10-23 10:03:10 +01002797 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
2798 break;
2799
2800 case Primitive::kPrimLong:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002801 case Primitive::kPrimDouble:
Alexandre Rames5319def2014-10-23 10:03:10 +01002802 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
2803 break;
2804
2805 default:
2806 LOG(FATAL) << "Unimplemented local type " << field_type;
2807 }
2808}
2809
2810void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002811 UNUSED(store);
Alexandre Rames5319def2014-10-23 10:03:10 +01002812}
2813
2814void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002815 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002816}
2817
2818void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002819 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002820}
2821
Alexandre Rames67555f72014-11-18 10:55:16 +00002822void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002823 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002824}
2825
2826void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002827 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00002828}
2829
2830void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002831 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002832}
2833
Alexandre Rames67555f72014-11-18 10:55:16 +00002834void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002835 HandleFieldSet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01002836}
2837
2838void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
2839 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
2840}
2841
2842void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002843 HBasicBlock* block = instruction->GetBlock();
2844 if (block->GetLoopInformation() != nullptr) {
2845 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2846 // The back edge will generate the suspend check.
2847 return;
2848 }
2849 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2850 // The goto will generate the suspend check.
2851 return;
2852 }
2853 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01002854}
2855
2856void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
2857 temp->SetLocations(nullptr);
2858}
2859
2860void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
2861 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002862 UNUSED(temp);
Alexandre Rames5319def2014-10-23 10:03:10 +01002863}
2864
Alexandre Rames67555f72014-11-18 10:55:16 +00002865void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
2866 LocationSummary* locations =
2867 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2868 InvokeRuntimeCallingConvention calling_convention;
2869 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2870}
2871
2872void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
2873 codegen_->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00002874 QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002875 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002876}
2877
2878void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
2879 LocationSummary* locations =
2880 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
2881 Primitive::Type input_type = conversion->GetInputType();
2882 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002883 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00002884 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
2885 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
2886 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
2887 }
2888
Alexandre Rames542361f2015-01-29 16:57:31 +00002889 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002890 locations->SetInAt(0, Location::RequiresFpuRegister());
2891 } else {
2892 locations->SetInAt(0, Location::RequiresRegister());
2893 }
2894
Alexandre Rames542361f2015-01-29 16:57:31 +00002895 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002896 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2897 } else {
2898 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2899 }
2900}
2901
2902void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
2903 Primitive::Type result_type = conversion->GetResultType();
2904 Primitive::Type input_type = conversion->GetInputType();
2905
2906 DCHECK_NE(input_type, result_type);
2907
Alexandre Rames542361f2015-01-29 16:57:31 +00002908 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002909 int result_size = Primitive::ComponentSize(result_type);
2910 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00002911 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002912 Register output = OutputRegister(conversion);
2913 Register source = InputRegisterAt(conversion, 0);
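    // Integral-to-integral conversions sign- or zero-extend the low bits of the
    // source: char is Java's only unsigned integral type, so conversions to char
    // (or widening from char) use Ubfx, while all other cases use Sbfx.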
Alexandre Rames3e69f162014-12-10 10:36:50 +00002914 if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
2915 __ Ubfx(output, source, 0, result_size * kBitsPerByte);
2916 } else if ((result_type == Primitive::kPrimChar) ||
2917 ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
2918 __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00002919 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002920 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00002921 }
Alexandre Rames542361f2015-01-29 16:57:31 +00002922 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002923 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00002924 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002925 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
2926 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00002927 } else if (Primitive::IsFloatingPointType(result_type) &&
2928 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002929 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
2930 } else {
2931 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
2932 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00002933 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002934}
Alexandre Rames67555f72014-11-18 10:55:16 +00002935
Serban Constantinescu02164b32014-11-13 14:05:07 +00002936void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
2937 HandleShift(ushr);
2938}
2939
2940void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
2941 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00002942}
2943
2944void LocationsBuilderARM64::VisitXor(HXor* instruction) {
2945 HandleBinaryOp(instruction);
2946}
2947
2948void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
2949 HandleBinaryOp(instruction);
2950}
2951
Calin Juravleb1498f62015-02-16 13:13:29 +00002952void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction) {
2953 // Nothing to do, this should be removed during prepare for register allocator.
2954 UNUSED(instruction);
2955 LOG(FATAL) << "Unreachable";
2956}
2957
2958void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction) {
2959 // Nothing to do, this should be removed during prepare for register allocator.
2960 UNUSED(instruction);
2961 LOG(FATAL) << "Unreachable";
2962}
2963
Alexandre Rames67555f72014-11-18 10:55:16 +00002964#undef __
2965#undef QUICK_ENTRY_POINT
2966
Alexandre Rames5319def2014-10-23 10:03:10 +01002967} // namespace arm64
2968} // namespace art