/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
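// Rough break-even arithmetic for the threshold above (illustrative only, derived from the
// estimates in the previous comment): at num_entries = 7 the compare/jump sequence costs about
// 1.5 * 7 + 3 ~= 14 instructions, while the jump table costs 7 instructions plus 7 32-bit
// literals, i.e. roughly the same amount of code/data. Larger switches therefore favor the
// jump table.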

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory operands used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.GetList()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.GetList()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
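
// Illustrative example of the helper above (a sketch, not taken verbatim from the emitted
// code): with live caller-save core registers {w1, w2}, live FP register {d0}, and a
// spill_offset that fits the load/store-pair immediate, the save path emits roughly
//   stp x1, x2, [sp, #spill_offset]       // core_list (16 bytes)
//   str d0, [sp, #spill_offset + 16]      // fp_list at spill_offset + core_spill_size
// and the restore path issues the matching ldp/ldr from the same offsets.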

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    arm64_codegen->InvokeRuntime(entry_point_offset, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that the jump table was generated with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
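
// Illustrative note on the table above: each entry is a 32-bit offset of the target block's
// label relative to `table_start_`. The packed-switch lowering elsewhere in this file is
// expected to materialize the table base address, load the entry selected by the switch value,
// add the two, and branch to the result.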

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
      : SlowPathCodeARM64(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(obj_.reg(), LR);
    DCHECK_NE(obj_.reg(), WSP);
    DCHECK_NE(obj_.reg(), WZR);
    DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- obj
    //   W0 <- ReadBarrierMark(W0)
    //   obj <- W0
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(obj_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ B(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
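
// Illustrative walk-through of GetNextLocation() above (an example, not part of the original
// source): for a managed signature (long, float, int), the long takes the next free core
// register, the float takes the first FP register, and the int takes the following core
// register; stack_index_ still advances by 2, 1 and 1 respectively, so stack space stays
// reserved for every argument even when it was passed in a register.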

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
  // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

Alexandre Rames3e69f162014-12-10 10:36:50 +00001023void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001024 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001025 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001026}
1027
Alexandre Rames5319def2014-10-23 10:03:10 +01001028void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001029 MacroAssembler* masm = GetVIXLAssembler();
1030 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001031 __ Bind(&frame_entry_label_);
1032
Serban Constantinescu02164b32014-11-13 14:05:07 +00001033 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1034 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001035 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001036 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001037 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001038 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001039 __ Ldr(wzr, MemOperand(temp, 0));
1040 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001041 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001042
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001043 if (!HasEmptyFrame()) {
1044 int frame_size = GetFrameSize();
1045 // Stack layout:
1046 // sp[frame_size - 8] : lr.
1047 // ... : other preserved core registers.
1048 // ... : other preserved fp registers.
1049 // ... : reserved frame space.
1050 // sp[0] : current method.
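    // The pre-indexed store below allocates the whole frame and stores the current
    // ArtMethod* at sp[0] in a single instruction.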
1051 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001052 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001053 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1054 frame_size - GetCoreSpillSize());
1055 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1056 frame_size - FrameEntrySpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001057 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001058}
1059
1060void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001061 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001062 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001063 if (!HasEmptyFrame()) {
1064 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001065 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1066 frame_size - FrameEntrySpillSize());
1067 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1068 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001069 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001070 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001071 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001072 __ Ret();
1073 GetAssembler()->cfi().RestoreState();
1074 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001075}
1076
Scott Wakeling97c72b72016-06-24 16:19:36 +01001077CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001078 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001079 return CPURegList(CPURegister::kRegister, kXRegSize,
1080 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001081}
1082
Scott Wakeling97c72b72016-06-24 16:19:36 +01001083CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001084 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1085 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001086 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1087 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001088}
1089
Alexandre Rames5319def2014-10-23 10:03:10 +01001090void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1091 __ Bind(GetLabelOf(block));
1092}
1093
Calin Juravle175dc732015-08-25 15:42:32 +01001094void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1095 DCHECK(location.IsRegister());
1096 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1097}
1098
Calin Juravlee460d1d2015-09-29 04:52:17 +01001099void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1100 if (location.IsRegister()) {
1101 locations->AddTemp(location);
1102 } else {
1103 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1104 }
1105}
1106
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001107void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001108 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001109 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001110 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001111 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001112 if (value_can_be_null) {
1113 __ Cbz(value, &done);
1114 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001115 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001116 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
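  // `card` holds the biased card table base, whose least significant byte equals the
  // dirty-card value; storing that byte at (card + (object >> kCardShift)) marks the
  // object's card without loading a separate constant.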
Serban Constantinescu02164b32014-11-13 14:05:07 +00001117 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001118 if (value_can_be_null) {
1119 __ Bind(&done);
1120 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001121}
1122
David Brazdil58282f42016-01-14 12:45:10 +00001123void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001124 // Blocked core registers:
1125 // lr : Runtime reserved.
1126 // tr : Runtime reserved.
1127 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1128 // ip1 : VIXL core temp.
1129 // ip0 : VIXL core temp.
1130 //
1131 // Blocked fp registers:
1132 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001133 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1134 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001135 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001136 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001137 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001138
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001139 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001140 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001141 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001142 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001143
David Brazdil58282f42016-01-14 12:45:10 +00001144 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001145 // Stubs do not save callee-save floating point registers. If the graph
1146 // is debuggable, we need to deal with these registers differently. For
1147 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001148 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1149 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001150 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001151 }
1152 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001153}
1154
Alexandre Rames3e69f162014-12-10 10:36:50 +00001155size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1156 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1157 __ Str(reg, MemOperand(sp, stack_index));
1158 return kArm64WordSize;
1159}
1160
1161size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1162 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1163 __ Ldr(reg, MemOperand(sp, stack_index));
1164 return kArm64WordSize;
1165}
1166
1167size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1168 FPRegister reg = FPRegister(reg_id, kDRegSize);
1169 __ Str(reg, MemOperand(sp, stack_index));
1170 return kArm64WordSize;
1171}
1172
1173size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1174 FPRegister reg = FPRegister(reg_id, kDRegSize);
1175 __ Ldr(reg, MemOperand(sp, stack_index));
1176 return kArm64WordSize;
1177}
1178
Alexandre Rames5319def2014-10-23 10:03:10 +01001179void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001180 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001181}
1182
1183void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001184 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001185}
1186
Alexandre Rames67555f72014-11-18 10:55:16 +00001187void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001188 if (constant->IsIntConstant()) {
1189 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1190 } else if (constant->IsLongConstant()) {
1191 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1192 } else if (constant->IsNullConstant()) {
1193 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001194 } else if (constant->IsFloatConstant()) {
1195 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1196 } else {
1197 DCHECK(constant->IsDoubleConstant());
1198 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1199 }
1200}
1201
Alexandre Rames3e69f162014-12-10 10:36:50 +00001202
1203static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1204 DCHECK(constant.IsConstant());
1205 HConstant* cst = constant.GetConstant();
1206 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001207 // Null is mapped to a core W register, which we associate with kPrimInt.
1208 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001209 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1210 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1211 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1212}
1213
Calin Juravlee460d1d2015-09-29 04:52:17 +01001214void CodeGeneratorARM64::MoveLocation(Location destination,
1215 Location source,
1216 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001217 if (source.Equals(destination)) {
1218 return;
1219 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001220
1221 // A valid move can always be inferred from the destination and source
1222 // locations. When moving from and to a register, the argument type can be
1223 // used to generate 32bit instead of 64bit moves. In debug mode we also
1224 // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001225 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001226
1227 if (destination.IsRegister() || destination.IsFpuRegister()) {
1228 if (unspecified_type) {
1229 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1230 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001231 (src_cst != nullptr && (src_cst->IsIntConstant()
1232 || src_cst->IsFloatConstant()
1233 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001234 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001235 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001236 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001237 // If the source is a double stack slot or a 64bit constant, a 64bit
1238 // type is appropriate. Else the source is a register, and since the
1239 // type has not been specified, we choose a 64bit type to force a 64bit
1240 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001241 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001242 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001243 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001244 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1245 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1246 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001247 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1248 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1249 __ Ldr(dst, StackOperandFrom(source));
1250 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001251 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001252 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001253 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001254 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001255 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001256 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001257 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001258 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1259 ? Primitive::kPrimLong
1260 : Primitive::kPrimInt;
1261 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1262 }
1263 } else {
1264 DCHECK(source.IsFpuRegister());
1265 if (destination.IsRegister()) {
1266 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1267 ? Primitive::kPrimDouble
1268 : Primitive::kPrimFloat;
1269 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1270 } else {
1271 DCHECK(destination.IsFpuRegister());
1272 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001273 }
1274 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001275 } else { // The destination is not a register. It must be a stack slot.
1276 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1277 if (source.IsRegister() || source.IsFpuRegister()) {
1278 if (unspecified_type) {
1279 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001280 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001281 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001282 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001283 }
1284 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001285 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1286 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1287 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001288 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001289 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1290 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001291 UseScratchRegisterScope temps(GetVIXLAssembler());
1292 HConstant* src_cst = source.GetConstant();
1293 CPURegister temp;
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001294 if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001295 temp = temps.AcquireW();
1296 } else if (src_cst->IsLongConstant()) {
1297 temp = temps.AcquireX();
1298 } else if (src_cst->IsFloatConstant()) {
1299 temp = temps.AcquireS();
1300 } else {
1301 DCHECK(src_cst->IsDoubleConstant());
1302 temp = temps.AcquireD();
1303 }
1304 MoveConstant(temp, src_cst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001305 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001306 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001307 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001308 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001309 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001310 // There is generally less pressure on FP registers.
1311 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001312 __ Ldr(temp, StackOperandFrom(source));
1313 __ Str(temp, StackOperandFrom(destination));
1314 }
1315 }
1316}
1317
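// Plain (non-acquire) load: the instruction is chosen by type, zero-extending boolean and
// char values and sign-extending byte and short values.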
1318void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001319 CPURegister dst,
1320 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001321 switch (type) {
1322 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001323 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001324 break;
1325 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001326 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001327 break;
1328 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001329 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001330 break;
1331 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001332 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001333 break;
1334 case Primitive::kPrimInt:
1335 case Primitive::kPrimNot:
1336 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001337 case Primitive::kPrimFloat:
1338 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001339 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001340 __ Ldr(dst, src);
1341 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001342 case Primitive::kPrimVoid:
1343 LOG(FATAL) << "Unreachable type " << type;
1344 }
1345}
1346
Calin Juravle77520bc2015-01-12 18:45:46 +00001347void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001348 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001349 const MemOperand& src,
1350 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001351 MacroAssembler* masm = GetVIXLAssembler();
1352 BlockPoolsScope block_pools(masm);
1353 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001354 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001355 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001356
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001357 DCHECK(!src.IsPreIndex());
1358 DCHECK(!src.IsPostIndex());
1359
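  // Ldar and its variants only accept a base register with no offset, so the full address
  // is materialized into a temporary first.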
1360 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001361 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001362 MemOperand base = MemOperand(temp_base);
1363 switch (type) {
1364 case Primitive::kPrimBoolean:
1365 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001366 if (needs_null_check) {
1367 MaybeRecordImplicitNullCheck(instruction);
1368 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001369 break;
1370 case Primitive::kPrimByte:
1371 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001372 if (needs_null_check) {
1373 MaybeRecordImplicitNullCheck(instruction);
1374 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001375 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1376 break;
1377 case Primitive::kPrimChar:
1378 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001379 if (needs_null_check) {
1380 MaybeRecordImplicitNullCheck(instruction);
1381 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001382 break;
1383 case Primitive::kPrimShort:
1384 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001385 if (needs_null_check) {
1386 MaybeRecordImplicitNullCheck(instruction);
1387 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001388 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1389 break;
1390 case Primitive::kPrimInt:
1391 case Primitive::kPrimNot:
1392 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001393 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001394 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001395 if (needs_null_check) {
1396 MaybeRecordImplicitNullCheck(instruction);
1397 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001398 break;
1399 case Primitive::kPrimFloat:
1400 case Primitive::kPrimDouble: {
1401 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001402 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001403
1404 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1405 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001406 if (needs_null_check) {
1407 MaybeRecordImplicitNullCheck(instruction);
1408 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001409 __ Fmov(FPRegister(dst), temp);
1410 break;
1411 }
1412 case Primitive::kPrimVoid:
1413 LOG(FATAL) << "Unreachable type " << type;
1414 }
1415}
1416
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001417void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001418 CPURegister src,
1419 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001420 switch (type) {
1421 case Primitive::kPrimBoolean:
1422 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001423 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001424 break;
1425 case Primitive::kPrimChar:
1426 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001427 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001428 break;
1429 case Primitive::kPrimInt:
1430 case Primitive::kPrimNot:
1431 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001432 case Primitive::kPrimFloat:
1433 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001434 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001435 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001436 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001437 case Primitive::kPrimVoid:
1438 LOG(FATAL) << "Unreachable type " << type;
1439 }
1440}
1441
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001442void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1443 CPURegister src,
1444 const MemOperand& dst) {
1445 UseScratchRegisterScope temps(GetVIXLAssembler());
1446 Register temp_base = temps.AcquireX();
1447
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001448 DCHECK(!dst.IsPreIndex());
1449 DCHECK(!dst.IsPostIndex());
1450
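  // Stlr and its variants likewise only take a plain base register, so materialize the
  // address into a temporary.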
1451 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001452 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001453 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001454 MemOperand base = MemOperand(temp_base);
1455 switch (type) {
1456 case Primitive::kPrimBoolean:
1457 case Primitive::kPrimByte:
1458 __ Stlrb(Register(src), base);
1459 break;
1460 case Primitive::kPrimChar:
1461 case Primitive::kPrimShort:
1462 __ Stlrh(Register(src), base);
1463 break;
1464 case Primitive::kPrimInt:
1465 case Primitive::kPrimNot:
1466 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001467 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001468 __ Stlr(Register(src), base);
1469 break;
1470 case Primitive::kPrimFloat:
1471 case Primitive::kPrimDouble: {
1472 DCHECK(src.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001473 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001474
1475 Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1476 __ Fmov(temp, FPRegister(src));
1477 __ Stlr(temp, base);
1478 break;
1479 }
1480 case Primitive::kPrimVoid:
1481 LOG(FATAL) << "Unreachable type " << type;
1482 }
1483}
1484
Calin Juravle175dc732015-08-25 15:42:32 +01001485void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1486 HInstruction* instruction,
1487 uint32_t dex_pc,
1488 SlowPathCode* slow_path) {
Andreas Gampe542451c2016-07-26 09:02:02 -07001489 InvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value(),
Calin Juravle175dc732015-08-25 15:42:32 +01001490 instruction,
1491 dex_pc,
1492 slow_path);
1493}
1494
Alexandre Rames67555f72014-11-18 10:55:16 +00001495void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
1496 HInstruction* instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00001497 uint32_t dex_pc,
1498 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001499 ValidateInvokeRuntime(instruction, slow_path);
Alexandre Ramesd921d642015-04-16 15:07:16 +01001500 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames67555f72014-11-18 10:55:16 +00001501 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1502 __ Blr(lr);
Roland Levillain896e32d2015-05-05 18:07:10 +01001503 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00001504}
1505
Roland Levillaindec8f632016-07-22 17:10:06 +01001506void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1507 HInstruction* instruction,
1508 SlowPathCode* slow_path) {
1509 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
1510 BlockPoolsScope block_pools(GetVIXLAssembler());
1511 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1512 __ Blr(lr);
1513}
1514
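// Loads the class status with acquire semantics and branches to the slow path unless the
// class status is at least kStatusInitialized.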
Alexandre Rames67555f72014-11-18 10:55:16 +00001515void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001516 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001517 UseScratchRegisterScope temps(GetVIXLAssembler());
1518 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001519 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1520
Serban Constantinescu02164b32014-11-13 14:05:07 +00001521 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001522 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1523 __ Add(temp, class_reg, status_offset);
1524 __ Ldar(temp, HeapOperand(temp));
1525 __ Cmp(temp, mirror::Class::kStatusInitialized);
1526 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001527 __ Bind(slow_path->GetExitLabel());
1528}
Alexandre Rames5319def2014-10-23 10:03:10 +01001529
Roland Levillain44015862016-01-22 11:47:17 +00001530void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001531 BarrierType type = BarrierAll;
1532
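  // kAnyAny and kAnyStore need a full barrier; kLoadAny only needs to order reads, and
  // kStoreStore only writes. All barriers use the inner shareable domain.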
1533 switch (kind) {
1534 case MemBarrierKind::kAnyAny:
1535 case MemBarrierKind::kAnyStore: {
1536 type = BarrierAll;
1537 break;
1538 }
1539 case MemBarrierKind::kLoadAny: {
1540 type = BarrierReads;
1541 break;
1542 }
1543 case MemBarrierKind::kStoreStore: {
1544 type = BarrierWrites;
1545 break;
1546 }
1547 default:
1548 LOG(FATAL) << "Unexpected memory barrier " << kind;
1549 }
1550 __ Dmb(InnerShareable, type);
1551}
1552
Serban Constantinescu02164b32014-11-13 14:05:07 +00001553void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1554 HBasicBlock* successor) {
1555 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001556 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1557 if (slow_path == nullptr) {
1558 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1559 instruction->SetSlowPath(slow_path);
1560 codegen_->AddSlowPath(slow_path);
1561 if (successor != nullptr) {
1562 DCHECK(successor->IsLoopHeader());
1563 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1564 }
1565 } else {
1566 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1567 }
1568
Serban Constantinescu02164b32014-11-13 14:05:07 +00001569 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1570 Register temp = temps.AcquireW();
1571
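  // A non-zero value in the thread flags indicates a pending suspend or checkpoint request.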
Andreas Gampe542451c2016-07-26 09:02:02 -07001572 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001573 if (successor == nullptr) {
1574 __ Cbnz(temp, slow_path->GetEntryLabel());
1575 __ Bind(slow_path->GetReturnLabel());
1576 } else {
1577 __ Cbz(temp, codegen_->GetLabelOf(successor));
1578 __ B(slow_path->GetEntryLabel());
1579 // slow_path will return to GetLabelOf(successor).
1580 }
1581}
1582
Alexandre Rames5319def2014-10-23 10:03:10 +01001583InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1584 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001585 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001586 assembler_(codegen->GetAssembler()),
1587 codegen_(codegen) {}
1588
1589#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001590 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001591
1592#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1593
1594enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001595 // Using a base helps identify when we hit such breakpoints.
1596 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001597#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1598 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1599#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1600};
1601
1602#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001603 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001604 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1605 } \
1606 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1607 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1608 locations->SetOut(Location::Any()); \
1609 }
1610 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1611#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1612
1613#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001614#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001615
Alexandre Rames67555f72014-11-18 10:55:16 +00001616void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001617 DCHECK_EQ(instr->InputCount(), 2U);
1618 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1619 Primitive::Type type = instr->GetResultType();
1620 switch (type) {
1621 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001622 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001623 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001624 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001625 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001626 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001627
1628 case Primitive::kPrimFloat:
1629 case Primitive::kPrimDouble:
1630 locations->SetInAt(0, Location::RequiresFpuRegister());
1631 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001632 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001633 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001634
Alexandre Rames5319def2014-10-23 10:03:10 +01001635 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001636 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001637 }
1638}
1639
Alexandre Rames09a99962015-04-15 11:47:56 +01001640void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001641 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1642
1643 bool object_field_get_with_read_barrier =
1644 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001645 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001646 new (GetGraph()->GetArena()) LocationSummary(instruction,
1647 object_field_get_with_read_barrier ?
1648 LocationSummary::kCallOnSlowPath :
1649 LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001650 locations->SetInAt(0, Location::RequiresRegister());
1651 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1652 locations->SetOut(Location::RequiresFpuRegister());
1653 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001654 // The output overlaps for an object field get when read barriers
1655 // are enabled: we do not want the load to overwrite the object's
1656 // location, as we need it to emit the read barrier.
1657 locations->SetOut(
1658 Location::RequiresRegister(),
1659 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001660 }
1661}
1662
1663void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1664 const FieldInfo& field_info) {
1665 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001666 LocationSummary* locations = instruction->GetLocations();
1667 Location base_loc = locations->InAt(0);
1668 Location out = locations->Out();
1669 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001670 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001671 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001672 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001673
Roland Levillain44015862016-01-22 11:47:17 +00001674 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1675 // Object FieldGet with Baker's read barrier case.
1676 MacroAssembler* masm = GetVIXLAssembler();
1677 UseScratchRegisterScope temps(masm);
1678 // /* HeapReference<Object> */ out = *(base + offset)
1679 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1680 Register temp = temps.AcquireW();
1681 // Note that potential implicit null checks are handled in this
1682 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1683 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1684 instruction,
1685 out,
1686 base,
1687 offset,
1688 temp,
1689 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001690 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001691 } else {
1692 // General case.
1693 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001694 // Note that a potential implicit null check is handled in this
1695 // CodeGeneratorARM64::LoadAcquire call.
1696 // NB: LoadAcquire will record the pc info if needed.
1697 codegen_->LoadAcquire(
1698 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001699 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001700 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001701 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001702 }
Roland Levillain44015862016-01-22 11:47:17 +00001703 if (field_type == Primitive::kPrimNot) {
1704 // If read barriers are enabled, emit read barriers other than
1705 // Baker's using a slow path (and also unpoison the loaded
1706 // reference, if heap poisoning is enabled).
1707 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1708 }
Roland Levillain4d027112015-07-01 15:41:14 +01001709 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001710}
1711
1712void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1713 LocationSummary* locations =
1714 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1715 locations->SetInAt(0, Location::RequiresRegister());
1716 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
1717 locations->SetInAt(1, Location::RequiresFpuRegister());
1718 } else {
1719 locations->SetInAt(1, Location::RequiresRegister());
1720 }
1721}
1722
1723void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001724 const FieldInfo& field_info,
1725 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001726 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001727 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001728
1729 Register obj = InputRegisterAt(instruction, 0);
1730 CPURegister value = InputCPURegisterAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001731 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001732 Offset offset = field_info.GetFieldOffset();
1733 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001734
Roland Levillain4d027112015-07-01 15:41:14 +01001735 {
1736 // We use a block to end the scratch scope before the write barrier, thus
1737 // freeing the temporary registers so they can be used in `MarkGCCard`.
1738 UseScratchRegisterScope temps(GetVIXLAssembler());
1739
1740 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1741 DCHECK(value.IsW());
1742 Register temp = temps.AcquireW();
1743 __ Mov(temp, value.W());
1744 GetAssembler()->PoisonHeapReference(temp.W());
1745 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001746 }
Roland Levillain4d027112015-07-01 15:41:14 +01001747
1748 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001749 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1750 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001751 } else {
1752 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1753 codegen_->MaybeRecordImplicitNullCheck(instruction);
1754 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001755 }
1756
1757 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001758 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001759 }
1760}
1761
Alexandre Rames67555f72014-11-18 10:55:16 +00001762void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001763 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001764
1765 switch (type) {
1766 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001767 case Primitive::kPrimLong: {
1768 Register dst = OutputRegister(instr);
1769 Register lhs = InputRegisterAt(instr, 0);
1770 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001771 if (instr->IsAdd()) {
1772 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001773 } else if (instr->IsAnd()) {
1774 __ And(dst, lhs, rhs);
1775 } else if (instr->IsOr()) {
1776 __ Orr(dst, lhs, rhs);
1777 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001778 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001779 } else if (instr->IsRor()) {
1780 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001781 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001782 __ Ror(dst, lhs, shift);
1783 } else {
1784 // Ensure the shift distance is in a register of the same size as the result. If
1785 // we are rotating a long and the shift originally comes in a W register, we do
1786 // not need to sxtw it for use as an X register, since the shift distance is
1787 // always masked with (reg_bits - 1).
1788 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1789 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001790 } else {
1791 DCHECK(instr->IsXor());
1792 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001793 }
1794 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001795 }
1796 case Primitive::kPrimFloat:
1797 case Primitive::kPrimDouble: {
1798 FPRegister dst = OutputFPRegister(instr);
1799 FPRegister lhs = InputFPRegisterAt(instr, 0);
1800 FPRegister rhs = InputFPRegisterAt(instr, 1);
1801 if (instr->IsAdd()) {
1802 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001803 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001804 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001805 } else {
1806 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001807 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001808 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001809 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001810 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001811 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001812 }
1813}
1814
Serban Constantinescu02164b32014-11-13 14:05:07 +00001815void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1816 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1817
1818 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1819 Primitive::Type type = instr->GetResultType();
1820 switch (type) {
1821 case Primitive::kPrimInt:
1822 case Primitive::kPrimLong: {
1823 locations->SetInAt(0, Location::RequiresRegister());
1824 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1825 locations->SetOut(Location::RequiresRegister());
1826 break;
1827 }
1828 default:
1829 LOG(FATAL) << "Unexpected shift type " << type;
1830 }
1831}
1832
1833void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1834 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1835
1836 Primitive::Type type = instr->GetType();
1837 switch (type) {
1838 case Primitive::kPrimInt:
1839 case Primitive::kPrimLong: {
1840 Register dst = OutputRegister(instr);
1841 Register lhs = InputRegisterAt(instr, 0);
1842 Operand rhs = InputOperandAt(instr, 1);
1843 if (rhs.IsImmediate()) {
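        // The shift distance is masked to the operand width (31 for int, 63 for long),
        // matching Java shift semantics.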
Scott Wakeling97c72b72016-06-24 16:19:36 +01001844 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00001845 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001846 if (instr->IsShl()) {
1847 __ Lsl(dst, lhs, shift_value);
1848 } else if (instr->IsShr()) {
1849 __ Asr(dst, lhs, shift_value);
1850 } else {
1851 __ Lsr(dst, lhs, shift_value);
1852 }
1853 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001854 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001855
1856 if (instr->IsShl()) {
1857 __ Lsl(dst, lhs, rhs_reg);
1858 } else if (instr->IsShr()) {
1859 __ Asr(dst, lhs, rhs_reg);
1860 } else {
1861 __ Lsr(dst, lhs, rhs_reg);
1862 }
1863 }
1864 break;
1865 }
1866 default:
1867 LOG(FATAL) << "Unexpected shift operation type " << type;
1868 }
1869}
1870
Alexandre Rames5319def2014-10-23 10:03:10 +01001871void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001872 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001873}
1874
1875void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001876 HandleBinaryOp(instruction);
1877}
1878
1879void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1880 HandleBinaryOp(instruction);
1881}
1882
1883void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1884 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001885}
1886
Artem Serov7fc63502016-02-09 17:15:29 +00001887void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001888 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1889 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1890 locations->SetInAt(0, Location::RequiresRegister());
1891 // There is no immediate variant of negated bitwise instructions in AArch64.
1892 locations->SetInAt(1, Location::RequiresRegister());
1893 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1894}
1895
Artem Serov7fc63502016-02-09 17:15:29 +00001896void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001897 Register dst = OutputRegister(instr);
1898 Register lhs = InputRegisterAt(instr, 0);
1899 Register rhs = InputRegisterAt(instr, 1);
1900
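  // Use the ARM64 instructions that negate the right operand: Bic (AND NOT),
  // Orn (ORR NOT) and Eon (EOR NOT).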
1901 switch (instr->GetOpKind()) {
1902 case HInstruction::kAnd:
1903 __ Bic(dst, lhs, rhs);
1904 break;
1905 case HInstruction::kOr:
1906 __ Orn(dst, lhs, rhs);
1907 break;
1908 case HInstruction::kXor:
1909 __ Eon(dst, lhs, rhs);
1910 break;
1911 default:
1912 LOG(FATAL) << "Unreachable";
1913 }
1914}
1915
Alexandre Rames8626b742015-11-25 16:28:08 +00001916void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1917 HArm64DataProcWithShifterOp* instruction) {
1918 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1919 instruction->GetType() == Primitive::kPrimLong);
1920 LocationSummary* locations =
1921 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1922 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1923 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1924 } else {
1925 locations->SetInAt(0, Location::RequiresRegister());
1926 }
1927 locations->SetInAt(1, Location::RequiresRegister());
1928 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1929}
1930
1931void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1932 HArm64DataProcWithShifterOp* instruction) {
1933 Primitive::Type type = instruction->GetType();
1934 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1935 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1936 Register out = OutputRegister(instruction);
1937 Register left;
1938 if (kind != HInstruction::kNeg) {
1939 left = InputRegisterAt(instruction, 0);
1940 }
1941 // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion into the
1942 // shifter operand operation, the IR that produces `right_reg` (the input to the type
1943 // conversion) can have a type different from this instruction's type,
1944 // so we indicate the type explicitly.
1945 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001946 int64_t shift_amount = instruction->GetShiftAmount() &
1947 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001948
1949 Operand right_operand(0);
1950
1951 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1952 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1953 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1954 } else {
1955 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1956 }
1957
1958 // Logical binary operations do not support extension operations in the
1959 // operand. Note that VIXL would still manage if it was passed by generating
1960 // the extension as a separate instruction.
1961 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1962 DCHECK(!right_operand.IsExtendedRegister() ||
1963 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1964 kind != HInstruction::kNeg));
1965 switch (kind) {
1966 case HInstruction::kAdd:
1967 __ Add(out, left, right_operand);
1968 break;
1969 case HInstruction::kAnd:
1970 __ And(out, left, right_operand);
1971 break;
1972 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001973 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001974 __ Neg(out, right_operand);
1975 break;
1976 case HInstruction::kOr:
1977 __ Orr(out, left, right_operand);
1978 break;
1979 case HInstruction::kSub:
1980 __ Sub(out, left, right_operand);
1981 break;
1982 case HInstruction::kXor:
1983 __ Eor(out, left, right_operand);
1984 break;
1985 default:
1986 LOG(FATAL) << "Unexpected operation kind: " << kind;
1987 UNREACHABLE();
1988 }
1989}
1990
Artem Serov328429f2016-07-06 16:23:04 +01001991void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001992 LocationSummary* locations =
1993 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1994 locations->SetInAt(0, Location::RequiresRegister());
1995 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1996 locations->SetOut(Location::RequiresRegister());
1997}
1998
Roland Levillain12ecf082016-08-08 10:18:37 +01001999void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002000 __ Add(OutputRegister(instruction),
2001 InputRegisterAt(instruction, 0),
2002 Operand(InputOperandAt(instruction, 1)));
2003}
2004
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002005void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002006 LocationSummary* locations =
2007 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002008 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2009 if (instr->GetOpKind() == HInstruction::kSub &&
2010 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002011 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002012 // Don't allocate register for Mneg instruction.
2013 } else {
2014 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2015 Location::RequiresRegister());
2016 }
2017 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2018 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002019 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2020}
2021
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002022void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002023 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002024 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2025 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002026
2027 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2028 // This fixup should be carried out for all multiply-accumulate instructions:
2029 // madd, msub, smaddl, smsubl, umaddl and umsubl.
2030 if (instr->GetType() == Primitive::kPrimLong &&
2031 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2032 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002033 vixl::aarch64::Instruction* prev =
2034 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002035 if (prev->IsLoadOrStore()) {
2036 // Make sure we emit exactly one nop.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002037 vixl::aarch64::CodeBufferCheckScope scope(masm,
2038 kInstructionSize,
2039 vixl::aarch64::CodeBufferCheckScope::kCheck,
2040 vixl::aarch64::CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002041 __ nop();
2042 }
2043 }
2044
2045 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002046 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002047 __ Madd(res, mul_left, mul_right, accumulator);
2048 } else {
2049 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002050 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002051 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002052 __ Mneg(res, mul_left, mul_right);
2053 } else {
2054 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2055 __ Msub(res, mul_left, mul_right, accumulator);
2056 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002057 }
2058}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  // The read barrier instrumentation of object ArrayGet instructions
  // does not support the HIntermediateAddress instruction.
  DCHECK(!((type == Primitive::kPrimNot) &&
           instruction->GetArray()->IsIntermediateAddress() &&
           kEmitCompilerReadBarrier));

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
      source = HeapOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
    }

    codegen_->Load(type, OutputCPURegister(instruction), source);
    codegen_->MaybeRecordImplicitNullCheck(instruction);

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}
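
// Editorial sketch (not part of the upstream file): the addressing logic above
// computes element addresses as
//   address = array_base + data_offset + (index << component_size_shift),
// folding the shifted index into the immediate offset when the index is a
// constant. The same arithmetic in plain C++, with hypothetical parameter
// names (assumes <cstdint> is reachable through the existing includes):
static ATTRIBUTE_UNUSED uint64_t ArrayElementAddress(uint64_t array_base,
                                                     uint32_t data_offset,
                                                     uint64_t index,
                                                     size_t component_size_shift) {
  // Constant index: `data_offset + (index << shift)` becomes one immediate.
  // Register index: `array_base + data_offset` goes into a temp and the index
  // register is scaled by LSL in the memory operand.
  return array_base + data_offset + (index << component_size_shift);
}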

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool object_array_set_with_read_barrier =
      kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    codegen_->Store(value_type, value, destination);
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  } else {
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    vixl::aarch64::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::aarch64::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          __ Str(wzr, destination);
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          __ B(&done);
          __ Bind(&non_zero);
        }

        if (kEmitCompilerReadBarrier) {
          // When read barriers are enabled, the type checking
          // instrumentation requires two read barriers:
          //
          //   __ Mov(temp2, temp);
          //   // /* HeapReference<Class> */ temp = temp->component_type_
          //   __ Ldr(temp, HeapOperand(temp, component_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp_loc, temp_loc, temp2_loc, component_offset);
          //
          //   // /* HeapReference<Class> */ temp2 = value->klass_
          //   __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          //   codegen_->GenerateReadBarrierSlow(
          //       instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
          //
          //   __ Cmp(temp, temp2);
          //
          // However, the second read barrier may trash `temp`, as it
          // is a temporary register, and as such would not be saved
          // along with live registers before calling the runtime (nor
          // restored afterwards). So in this case, we bail out and
          // delegate the work to the array set slow path.
          //
          // TODO: Extend the register allocator to support a new
          // "(locally) live temp" location so as to avoid always
          // going into the slow path when read barriers are enabled.
          __ B(slow_path->GetEntryLabel());
        } else {
          Register temp2 = temps.AcquireSameSizeAs(array);
          // /* HeapReference<Class> */ temp = array->klass_
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->component_type_
          __ Ldr(temp, HeapOperand(temp, component_offset));
          // /* HeapReference<Class> */ temp2 = value->klass_
          __ Ldr(temp2, HeapOperand(Register(value), class_offset));
          // If heap poisoning is enabled, no need to unpoison `temp`
          // nor `temp2`, as we are comparing two poisoned references.
          __ Cmp(temp, temp2);

          if (instruction->StaticTypeOfArrayIsObjectArray()) {
            vixl::aarch64::Label do_put;
            __ B(eq, &do_put);
            // If heap poisoning is enabled, the `temp` reference has
            // not been unpoisoned yet; unpoison it now.
            GetAssembler()->MaybeUnpoisonHeapReference(temp);

            // /* HeapReference<Class> */ temp = temp->super_class_
            __ Ldr(temp, HeapOperand(temp, super_offset));
            // If heap poisoning is enabled, no need to unpoison
            // `temp`, as we are comparing against null below.
            __ Cbnz(temp, slow_path->GetEntryLabel());
            __ Bind(&do_put);
          } else {
            __ B(ne, slow_path->GetEntryLabel());
          }
          temps.Release(temp2);
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      __ Str(source, destination);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
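
// Editorial sketch (not part of the upstream file): the fast-path type check
// emitted above accepts a store when the value's class equals the array's
// component type, or, for a statically-typed Object[], when the component
// type's super class is null (i.e. the component type is java.lang.Object).
// The same decision as a plain predicate, with hypothetical opaque class
// pointers standing in for the loaded heap references:
static ATTRIBUTE_UNUSED bool ArrayStoreFastPathOk(const void* value_klass,
                                                  const void* component_type,
                                                  const void* component_type_super,
                                                  bool array_is_static_object_array) {
  // Exact match of classes: always assignable.
  if (value_klass == component_type) {
    return true;
  }
  // A null super class means the component type is java.lang.Object, so any
  // reference may be stored; everything else goes to the slow path.
  return array_is_static_object_array && component_type_super == nullptr;
}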

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}
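
// Editorial sketch (not part of the upstream file): the single Cmp plus a
// branch on `hs` ("unsigned higher or same") above covers both failure modes
// at once, because reinterpreting a negative index as unsigned makes it
// larger than any valid array length:
static ATTRIBUTE_UNUSED bool BoundsCheckFails(int32_t index, int32_t length) {
  // Equivalent to `index < 0 || index >= length`, given that lengths are
  // always non-negative.
  return static_cast<uint32_t>(index) >= static_cast<uint32_t>(length);
}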

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}
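
// Editorial sketch (not part of the upstream file): the comment above relies
// on IEEE-754 comparison treating the two zeros as equal even though their
// encodings differ. Plain C++ exhibits the same behavior (assumes <cstring>
// and <cstdint> are reachable through the existing includes):
static ATTRIBUTE_UNUSED void FcmpZeroSemanticsDemo() {
  float pos_zero = 0.0f;
  float neg_zero = -0.0f;
  // IEEE-754 (and thus FCMP) comparison: +0.0 == -0.0.
  DCHECK(pos_zero == neg_zero);
  // Yet the bit patterns differ, which is how Float.compare-style methods
  // can still rank -0.0f strictly below 0.0f.
  uint32_t pos_bits, neg_bits;
  memcpy(&pos_bits, &pos_zero, sizeof(pos_bits));
  memcpy(&neg_bits, &neg_zero, sizeof(neg_bits));
  DCHECK_NE(pos_bits, neg_bits);
}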

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
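
// Editorial sketch (not part of the upstream file): the Cset/Cneg pair above
// builds the -1/0/+1 result of HCompare without any branch. The same logic
// in C++ terms:
static ATTRIBUTE_UNUSED int32_t ThreeWayCompare(int64_t left, int64_t right) {
  int32_t result = (left != right) ? 1 : 0;  // Cset result, ne
  if (left < right) {
    result = -result;                        // Cneg result, result, lt
  }
  return result;  // 0 if equal, +1 if greater, -1 if less.
}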

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}
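
// Editorial sketch (not part of the upstream file): the Add/Cmp/Csel/Asr
// sequence above implements round-toward-zero division by 2^k. An arithmetic
// shift alone rounds toward negative infinity, so a bias of (2^k - 1) is
// added first for negative dividends. In C++ (assumes the platform's
// arithmetic right shift of negative values, true on ART's targets):
static ATTRIBUTE_UNUSED int64_t DivByPositivePowerOfTwo(int64_t dividend, int ctz_imm) {
  int64_t bias = (INT64_C(1) << ctz_imm) - 1;
  int64_t adjusted = (dividend < 0) ? (dividend + bias) : dividend;  // Add + Csel
  return adjusted >> ctz_imm;  // Asr; the emitted code negates this instead
                               // when the divisor is negative.
}
// Example: DivByPositivePowerOfTwo(-7, 2) == -1, matching -7 / 4 in Java,
// whereas a bare shift would give -2.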

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}
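
// Editorial sketch (not part of the upstream file): a worked instance of the
// multiply-high scheme above for int32_t division by 7. The magic constant
// 0x92492493 and shift 2 are the standard "Hacker's Delight" values assumed
// here for this divisor; CalculateMagicAndShiftForDivRem computes them in
// general. Relies on arithmetic right shift of negative values, as on ART's
// targets:
static ATTRIBUTE_UNUSED int32_t DivideBy7(int32_t dividend) {
  int32_t magic = static_cast<int32_t>(0x92492493);  // Negative as a signed value.
  int32_t temp =
      static_cast<int32_t>((static_cast<int64_t>(dividend) * magic) >> 32);  // get_high
  temp += dividend;            // imm > 0 && magic < 0: add the dividend back.
  temp >>= 2;                  // Asr by the computed shift.
  return temp - (temp >> 31);  // Add one when the intermediate is negative.
}
// Example: DivideBy7(7) == 1, DivideBy7(-7) == -1, DivideBy7(6) == 0.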

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::aarch64::Label* true_target,
                                                          vixl::aarch64::Label* false_target) {
  // FP branching requires both targets to be explicit. If either of the targets
  // is nullptr (fallthrough), use and bind `fallthrough_target` instead.
  vixl::aarch64::Label fallthrough_target;
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    Primitive::Type type = condition->InputAt(0)->GetType();
    if (Primitive::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::aarch64::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }

  if (fallthrough_target.IsLinked()) {
    __ Bind(&fallthrough_target);
  }
}
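
// Editorial sketch (not part of the upstream file): the Tbnz/Tbz cases above
// exploit the fact that for a comparison against zero, `lt` (respectively
// `ge`) is fully determined by the sign bit, so a single test-bit-and-branch
// replaces the Cmp + B pair without touching the flags:
static ATTRIBUTE_UNUSED bool IsLessThanZeroViaSignBit(int64_t lhs) {
  // Equivalent to `lhs < 0`; Tbnz on bit 63 (bit 31 for a W register)
  // branches on exactly this predicate.
  return ((static_cast<uint64_t>(lhs) >> 63) & 1) != 0;
}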

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
    true_target = nullptr;
  }
  vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
    false_target = nullptr;
  }
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;

    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister());
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

  if (Primitive::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}
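
// Editorial sketch (not part of the upstream file): Csel/Fcsel reduce a
// materialized HSelect to a branch-free conditional move. Note the operand
// order used above: input 1 holds the true value and input 0 the false
// value, so the emitted instruction behaves like this model:
static ATTRIBUTE_UNUSED int64_t CselModel(bool condition_holds,
                                          int64_t true_value,
                                          int64_t false_value) {
  // Matches `Csel(out, InputOperandAt(select, 1), InputOperandAt(select, 0),
  // csel_cond)`: the first source is taken when the condition holds.
  return condition_holds ? true_value : false_value;
}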
3066
David Srbecky0cf44932015-12-09 14:09:59 +00003067void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3068 new (GetGraph()->GetArena()) LocationSummary(info);
3069}
3070
David Srbeckyd28f4a02016-03-14 17:14:24 +00003071void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3072 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003073}
3074
3075void CodeGeneratorARM64::GenerateNop() {
3076 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003077}
3078
Alexandre Rames5319def2014-10-23 10:03:10 +01003079void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003080 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003081}
3082
3083void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003084 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003085}
3086
3087void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003088 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003089}
3090
3091void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003092 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003093}
3094
Roland Levillain44015862016-01-22 11:47:17 +00003095static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3096 return kEmitCompilerReadBarrier &&
3097 (kUseBakerReadBarrier ||
3098 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3099 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3100 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3101}
3102
Alexandre Rames67555f72014-11-18 10:55:16 +00003103void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003104 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003105 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3106 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003107 case TypeCheckKind::kExactCheck:
3108 case TypeCheckKind::kAbstractClassCheck:
3109 case TypeCheckKind::kClassHierarchyCheck:
3110 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003111 call_kind =
3112 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003113 break;
3114 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003115 case TypeCheckKind::kUnresolvedCheck:
3116 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003117 call_kind = LocationSummary::kCallOnSlowPath;
3118 break;
3119 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003120
Alexandre Rames67555f72014-11-18 10:55:16 +00003121 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003122 locations->SetInAt(0, Location::RequiresRegister());
3123 locations->SetInAt(1, Location::RequiresRegister());
3124 // The "out" register is used as a temporary, so it overlaps with the inputs.
3125 // Note that TypeCheckSlowPathARM64 uses this register too.
3126 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3127 // When read barriers are enabled, we need a temporary register for
3128 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003129 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003130 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003131 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003132}
3133
3134void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003135 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003136 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003137 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003138 Register obj = InputRegisterAt(instruction, 0);
3139 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003140 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003141 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003142 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3143 locations->GetTemp(0) :
3144 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003145 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3146 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3147 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3148 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003149
Scott Wakeling97c72b72016-06-24 16:19:36 +01003150 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003151 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003152
3153 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003154 // Avoid null check if we know `obj` is not null.
3155 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003156 __ Cbz(obj, &zero);
3157 }
3158
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003159 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003160 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003161
Roland Levillain44015862016-01-22 11:47:17 +00003162 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003163 case TypeCheckKind::kExactCheck: {
3164 __ Cmp(out, cls);
3165 __ Cset(out, eq);
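      // i.e. out = (obj->klass_ == cls) ? 1 : 0, materialized without a branch.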
3166 if (zero.IsLinked()) {
3167 __ B(&done);
3168 }
3169 break;
3170 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003171
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003172 case TypeCheckKind::kAbstractClassCheck: {
3173 // If the class is abstract, we eagerly fetch the super class of the
3174 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003175 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003176 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003177 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003178 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003179 // If `out` is null, we use it for the result, and jump to `done`.
3180 __ Cbz(out, &done);
3181 __ Cmp(out, cls);
3182 __ B(ne, &loop);
3183 __ Mov(out, 1);
3184 if (zero.IsLinked()) {
3185 __ B(&done);
3186 }
3187 break;
3188 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003189
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003190 case TypeCheckKind::kClassHierarchyCheck: {
3191 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003192 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003193 __ Bind(&loop);
3194 __ Cmp(out, cls);
3195 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003196 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003197 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003198 __ Cbnz(out, &loop);
3199 // If `out` is null, we use it for the result, and jump to `done`.
3200 __ B(&done);
3201 __ Bind(&success);
3202 __ Mov(out, 1);
3203 if (zero.IsLinked()) {
3204 __ B(&done);
3205 }
3206 break;
3207 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003208
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003209 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003210 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003211 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003212 __ Cmp(out, cls);
3213 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003214 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003215 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003216 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003217 // If `out` is null, we use it for the result, and jump to `done`.
3218 __ Cbz(out, &done);
3219 __ Ldrh(out, HeapOperand(out, primitive_offset));
3220 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3221 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003222 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003223 __ Mov(out, 1);
3224 __ B(&done);
3225 break;
3226 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003227
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003228 case TypeCheckKind::kArrayCheck: {
3229 __ Cmp(out, cls);
3230 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003231 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3232 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003233 codegen_->AddSlowPath(slow_path);
3234 __ B(ne, slow_path->GetEntryLabel());
3235 __ Mov(out, 1);
3236 if (zero.IsLinked()) {
3237 __ B(&done);
3238 }
3239 break;
3240 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003241
Calin Juravle98893e12015-10-02 21:05:03 +01003242 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003243 case TypeCheckKind::kInterfaceCheck: {
3244 // Note that we indeed only call on slow path, but we always go
3245 // into the slow path for the unresolved and interface check
3246 // cases.
3247 //
3248 // We cannot directly call the InstanceofNonTrivial runtime
3249 // entry point without resorting to a type checking slow path
3250 // here (i.e. by calling InvokeRuntime directly), as it would
3251 // require us to assign fixed registers for the inputs of this
3252 // HInstanceOf instruction (following the runtime calling
3253 // convention), which might be cluttered by the potential first
3254 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003255 //
3256 // TODO: Introduce a new runtime entry point taking the object
3257 // to test (instead of its class) as argument, and let it deal
3258 // with the read barrier issues. This will let us refactor this
3259 // case of the `switch` code as it was previously (with a direct
3260 // call to the runtime not using a type checking slow path).
3261 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003262 DCHECK(locations->OnlyCallsOnSlowPath());
3263 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3264 /* is_fatal */ false);
3265 codegen_->AddSlowPath(slow_path);
3266 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003267 if (zero.IsLinked()) {
3268 __ B(&done);
3269 }
3270 break;
3271 }
3272 }
3273
3274 if (zero.IsLinked()) {
3275 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003276 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003277 }
3278
3279 if (done.IsLinked()) {
3280 __ Bind(&done);
3281 }
3282
3283 if (slow_path != nullptr) {
3284 __ Bind(slow_path->GetExitLabel());
3285 }
3286}
3287
3288void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3289 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3290 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3291
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003292 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3293 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003294 case TypeCheckKind::kExactCheck:
3295 case TypeCheckKind::kAbstractClassCheck:
3296 case TypeCheckKind::kClassHierarchyCheck:
3297 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003298 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3299 LocationSummary::kCallOnSlowPath :
3300 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003301 break;
3302 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003303 case TypeCheckKind::kUnresolvedCheck:
3304 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003305 call_kind = LocationSummary::kCallOnSlowPath;
3306 break;
3307 }
3308
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003309 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3310 locations->SetInAt(0, Location::RequiresRegister());
3311 locations->SetInAt(1, Location::RequiresRegister());
3312 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3313 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003314 // When read barriers are enabled, we need an additional temporary
3315 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003316 if (TypeCheckNeedsATemporary(type_check_kind)) {
3317 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003318 }
3319}
3320
3321void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003322 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003323 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003324 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003325 Register obj = InputRegisterAt(instruction, 0);
3326 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003327 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003328 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3329 locations->GetTemp(1) :
3330 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003331 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003332 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3333 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3334 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3335 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003336
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003337 bool is_type_check_slow_path_fatal =
3338 (type_check_kind == TypeCheckKind::kExactCheck ||
3339 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3340 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3341 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3342 !instruction->CanThrowIntoCatchBlock();
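  // For these simple check kinds the slow path only throws when the cast fails
  // and cannot transfer to a catch block here, so it never returns ("fatal").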
3343 SlowPathCodeARM64* type_check_slow_path =
3344 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3345 is_type_check_slow_path_fatal);
3346 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003347
Scott Wakeling97c72b72016-06-24 16:19:36 +01003348 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003349 // Avoid null check if we know obj is not null.
3350 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003351 __ Cbz(obj, &done);
3352 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003353
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003354 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003355 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003356
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003357 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003358 case TypeCheckKind::kExactCheck:
3359 case TypeCheckKind::kArrayCheck: {
3360 __ Cmp(temp, cls);
3361 // Jump to slow path for throwing the exception or doing a
3362 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003363 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003364 break;
3365 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003366
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003367 case TypeCheckKind::kAbstractClassCheck: {
3368 // If the class is abstract, we eagerly fetch the super class of the
3369 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003370 vixl::aarch64::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003371 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003372 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003373 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003374
3375 // If the class reference currently in `temp` is not null, jump
3376 // to the `compare_classes` label to compare it with the checked
3377 // class.
3378 __ Cbnz(temp, &compare_classes);
3379 // Otherwise, jump to the slow path to throw the exception.
3380 //
3381 // But before, move back the object's class into `temp` before
3382 // going into the slow path, as it has been overwritten in the
3383 // meantime.
3384 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003385 GenerateReferenceLoadTwoRegisters(
3386 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003387 __ B(type_check_slow_path->GetEntryLabel());
3388
3389 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003390 __ Cmp(temp, cls);
3391 __ B(ne, &loop);
3392 break;
3393 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003394
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003395 case TypeCheckKind::kClassHierarchyCheck: {
3396 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003397 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003398 __ Bind(&loop);
3399 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003400 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003401
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003402 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003403 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003404
3405 // If the class reference currently in `temp` is not null, jump
3406 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003407 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003408 // Otherwise, jump to the slow path to throw the exception.
3409 //
3410 // But before, move back the object's class into `temp` before
3411 // going into the slow path, as it has been overwritten in the
3412 // meantime.
3413 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003414 GenerateReferenceLoadTwoRegisters(
3415 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003416 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003417 break;
3418 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003419
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003420 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003421 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003422 vixl::aarch64::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003423 __ Cmp(temp, cls);
3424 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003425
3426 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003427 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003428 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003429
3430 // If the component type is not null (i.e. the object is indeed
3431 // an array), jump to label `check_non_primitive_component_type`
3432 // to further check that this component type is not a primitive
3433 // type.
3434 __ Cbnz(temp, &check_non_primitive_component_type);
3435 // Otherwise, jump to the slow path to throw the exception.
3436 //
3437 // But before, move back the object's class into `temp` before
3438 // going into the slow path, as it has been overwritten in the
3439 // meantime.
3440 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003441 GenerateReferenceLoadTwoRegisters(
3442 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003443 __ B(type_check_slow_path->GetEntryLabel());
3444
3445 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003446 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3447 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003448 __ Cbz(temp, &done);
3449 // Same comment as above regarding `temp` and the slow path.
3450 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003451 GenerateReferenceLoadTwoRegisters(
3452 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003453 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003454 break;
3455 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003456
Calin Juravle98893e12015-10-02 21:05:03 +01003457 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003458 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003459 // We always go into the type check slow path for the unresolved
3460 // and interface check cases.
3461 //
3462 // We cannot directly call the CheckCast runtime entry point
3463 // without resorting to a type checking slow path here (i.e. by
3464 // calling InvokeRuntime directly), as it would require us to
3465 // assign fixed registers for the inputs of this HCheckCast
3466 // instruction (following the runtime calling convention), which
3467 // might be cluttered by the potential first read barrier
3468 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003469 //
3470 // TODO: Introduce a new runtime entry point taking the object
3471 // to test (instead of its class) as argument, and let it deal
3472 // with the read barrier issues. This will let us refactor this
3473 // case of the `switch` code as it was previously (with a direct
3474 // call to the runtime not using a type checking slow path).
3475 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003476 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003477 break;
3478 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003479 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003480
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003481 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003482}
3483
Alexandre Rames5319def2014-10-23 10:03:10 +01003484void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3485 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3486 locations->SetOut(Location::ConstantLocation(constant));
3487}
3488
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003489void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003490 // Will be generated at use site.
3491}
3492
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003493void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3494 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3495 locations->SetOut(Location::ConstantLocation(constant));
3496}
3497
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003498void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003499 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003500}
3501
Calin Juravle175dc732015-08-25 15:42:32 +01003502void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3503 // The trampoline uses the same calling convention as dex calling conventions,
3504 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3505 // the method_idx.
3506 HandleInvoke(invoke);
3507}
3508
3509void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3510 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3511}
3512
Alexandre Rames5319def2014-10-23 10:03:10 +01003513void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003514 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003515 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003516}
3517
Alexandre Rames67555f72014-11-18 10:55:16 +00003518void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3519 HandleInvoke(invoke);
3520}
3521
3522void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3523 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003524 LocationSummary* locations = invoke->GetLocations();
3525 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003526 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003527 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003528 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003529
3530 // The register ip1 is required to be used for the hidden argument in
3531 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003532 MacroAssembler* masm = GetVIXLAssembler();
3533 UseScratchRegisterScope scratch_scope(masm);
3534 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003535 scratch_scope.Exclude(ip1);
3536 __ Mov(ip1, invoke->GetDexMethodIndex());
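  // ip1 now holds the method index as the hidden argument; if the IMT slot
  // resolves to the conflict trampoline, art_quick_imt_conflict_trampoline uses
  // it to locate the actual interface method.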
3537
Alexandre Rames67555f72014-11-18 10:55:16 +00003538 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003539 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003540 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003541 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003542 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003543 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003544 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003545 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003546 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003547 // Instead of simply (possibly) unpoisoning `temp` here, we should
3548 // emit a read barrier for the previous class reference load.
3549 // However this is not required in practice, as this is an
3550 // intermediate/temporary reference and because the current
3551 // concurrent copying collector keeps the from-space memory
3552 // intact/accessible until the end of the marking phase (the
3553 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003554 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003555 __ Ldr(temp,
3556 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3557 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003558 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00003559 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003560 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003561 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003562 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003563 // lr();
3564 __ Blr(lr);
3565 DCHECK(!codegen_->IsLeafMethod());
3566 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3567}
3568
3569void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003570 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3571 if (intrinsic.TryDispatch(invoke)) {
3572 return;
3573 }
3574
Alexandre Rames67555f72014-11-18 10:55:16 +00003575 HandleInvoke(invoke);
3576}
3577
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003578void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003579 // Explicit clinit checks triggered by static invokes must have been pruned by
3580 // art::PrepareForRegisterAllocation.
3581 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003582
Andreas Gampe878d58c2015-01-15 23:24:00 -08003583 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3584 if (intrinsic.TryDispatch(invoke)) {
3585 return;
3586 }
3587
Alexandre Rames67555f72014-11-18 10:55:16 +00003588 HandleInvoke(invoke);
3589}
3590
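// Returns true after emitting the intrinsic expansion if the locations builder
// marked `invoke` as intrinsified; otherwise returns false so the caller emits a
// regular call.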
Andreas Gampe878d58c2015-01-15 23:24:00 -08003591static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3592 if (invoke->GetLocations()->Intrinsified()) {
3593 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3594 intrinsic.Dispatch(invoke);
3595 return true;
3596 }
3597 return false;
3598}
3599
Vladimir Markodc151b22015-10-15 18:02:30 +01003600HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3601 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3602 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003603 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003604 return desired_dispatch_info;
3605}
3606
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003607void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003608 // For better instruction scheduling we load the direct code pointer before the method pointer.
3609 bool direct_code_loaded = false;
3610 switch (invoke->GetCodePtrLocation()) {
3611 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3612 // LR = code address from literal pool with link-time patch.
3613 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3614 direct_code_loaded = true;
3615 break;
3616 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3617 // LR = invoke->GetDirectCodePtr();
3618 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3619 direct_code_loaded = true;
3620 break;
3621 default:
3622 break;
3623 }
3624
Andreas Gampe878d58c2015-01-15 23:24:00 -08003625 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003626 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3627 switch (invoke->GetMethodLoadKind()) {
3628 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3629 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003630 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003631 break;
3632 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003633 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003634 break;
3635 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3636 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003637 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003638 break;
3639 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3640 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003641 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003642 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3643 break;
3644 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3645 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003646 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3647 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003648 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003649 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003650 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003651 __ Bind(adrp_label);
3652 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003653 }
Vladimir Marko58155012015-08-19 12:49:41 +00003654 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003655 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003656 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003657 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003658 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003659 __ Bind(ldr_label);
3660 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003661 }
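      // Illustrative only: once both patches are resolved at link time, the pair
      // behaves like
      //   adrp xTemp, <page of dex cache element>
      //   ldr  xTemp, [xTemp, #<page offset of dex cache element>]
      // i.e. a PC-relative, page-based load of the resolved ArtMethod*.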
Vladimir Marko58155012015-08-19 12:49:41 +00003662 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003663 }
Vladimir Marko58155012015-08-19 12:49:41 +00003664 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003665 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003666 Register reg = XRegisterFrom(temp);
3667 Register method_reg;
3668 if (current_method.IsRegister()) {
3669 method_reg = XRegisterFrom(current_method);
3670 } else {
3671 DCHECK(invoke->GetLocations()->Intrinsified());
3672 DCHECK(!current_method.IsValid());
3673 method_reg = reg;
3674 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3675 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003676
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003677 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003678 __ Ldr(reg.X(),
3679 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07003680 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003681 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003682 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3683 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003684 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3685 break;
3686 }
3687 }
3688
3689 switch (invoke->GetCodePtrLocation()) {
3690 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3691 __ Bl(&frame_entry_label_);
3692 break;
3693 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3694 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003695 vixl::aarch64::Label* label = &relative_call_patches_.back().label;
3696 SingleEmissionCheckScope guard(GetVIXLAssembler());
Alexandre Rames6dc01742015-11-12 14:44:19 +00003697 __ Bind(label);
3698 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003699 break;
3700 }
3701 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3702 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3703 // LR prepared above for better instruction scheduling.
3704 DCHECK(direct_code_loaded);
3705 // lr()
3706 __ Blr(lr);
3707 break;
3708 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3709 // LR = callee_method->entry_point_from_quick_compiled_code_;
3710 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003711 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07003712 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003713 // lr()
3714 __ Blr(lr);
3715 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003716 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003717
Andreas Gampe878d58c2015-01-15 23:24:00 -08003718 DCHECK(!IsLeafMethod());
3719}
3720
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003721void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003722 // Use the calling convention instead of the location of the receiver, as
3723 // intrinsics may have put the receiver in a different register. In the intrinsics
3724 // slow path, the arguments have been moved to the right place, so here we are
3725 // guaranteed that the receiver is the first register of the calling convention.
3726 InvokeDexCallingConvention calling_convention;
3727 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003728 Register temp = XRegisterFrom(temp_in);
3729 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3730 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
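  // The vtable is embedded in the class object, so the call target is reached
  // with a dependent load chain: receiver->klass_ -> vtable[index] -> entry point.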
3731 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003732 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003733
3734 BlockPoolsScope block_pools(GetVIXLAssembler());
3735
3736 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003737 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003738 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003739 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003740 // Instead of simply (possibly) unpoisoning `temp` here, we should
3741 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003742 // However this is not required in practice, as this is an intermediate/temporary reference and because the current
3743 // concurrent copying collector keeps the from-space memory
3744 // intact/accessible until the end of the marking phase (the
3745 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003746 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3747 // temp = temp->GetMethodAt(method_offset);
3748 __ Ldr(temp, MemOperand(temp, method_offset));
3749 // lr = temp->GetEntryPoint();
3750 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3751 // lr();
3752 __ Blr(lr);
3753}
3754
Scott Wakeling97c72b72016-06-24 16:19:36 +01003755vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
3756 const DexFile& dex_file,
3757 uint32_t string_index,
3758 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003759 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3760}
3761
Scott Wakeling97c72b72016-06-24 16:19:36 +01003762vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
3763 const DexFile& dex_file,
3764 uint32_t type_index,
3765 vixl::aarch64::Label* adrp_label) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003766 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3767}
3768
Scott Wakeling97c72b72016-06-24 16:19:36 +01003769vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
3770 const DexFile& dex_file,
3771 uint32_t element_offset,
3772 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003773 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3774}
3775
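// Usage pattern (see GenerateStaticOrDirectCall and VisitLoadClass): call once
// with adrp_label == nullptr to create the ADRP patch, then pass the returned
// label back in to create the dependent ADD/LDR patch on the same target.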
Scott Wakeling97c72b72016-06-24 16:19:36 +01003776vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
3777 const DexFile& dex_file,
3778 uint32_t offset_or_index,
3779 vixl::aarch64::Label* adrp_label,
3780 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003781 // Add a patch entry and return the label.
3782 patches->emplace_back(dex_file, offset_or_index);
3783 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003784 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003785 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3786 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3787 return label;
3788}
3789
Scott Wakeling97c72b72016-06-24 16:19:36 +01003790vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003791 const DexFile& dex_file, uint32_t string_index) {
3792 return boot_image_string_patches_.GetOrCreate(
3793 StringReference(&dex_file, string_index),
3794 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3795}
3796
Scott Wakeling97c72b72016-06-24 16:19:36 +01003797vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003798 const DexFile& dex_file, uint32_t type_index) {
3799 return boot_image_type_patches_.GetOrCreate(
3800 TypeReference(&dex_file, type_index),
3801 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3802}
3803
Scott Wakeling97c72b72016-06-24 16:19:36 +01003804vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
3805 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003806 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3807 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3808 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3809}
3810
Scott Wakeling97c72b72016-06-24 16:19:36 +01003811vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
3812 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003813 return DeduplicateUint64Literal(address);
3814}
3815
Vladimir Marko58155012015-08-19 12:49:41 +00003816void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3817 DCHECK(linker_patches->empty());
3818 size_t size =
3819 method_patches_.size() +
3820 call_patches_.size() +
3821 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003822 pc_relative_dex_cache_patches_.size() +
3823 boot_image_string_patches_.size() +
3824 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003825 boot_image_type_patches_.size() +
3826 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003827 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003828 linker_patches->reserve(size);
3829 for (const auto& entry : method_patches_) {
3830 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003831 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3832 linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003833 target_method.dex_file,
3834 target_method.dex_method_index));
3835 }
3836 for (const auto& entry : call_patches_) {
3837 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003838 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3839 linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003840 target_method.dex_file,
3841 target_method.dex_method_index));
3842 }
Scott Wakeling97c72b72016-06-24 16:19:36 +01003843 for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
3844 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003845 info.target_method.dex_file,
3846 info.target_method.dex_method_index));
3847 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003848 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003849 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003850 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003851 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003852 info.offset_or_index));
3853 }
3854 for (const auto& entry : boot_image_string_patches_) {
3855 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003856 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3857 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003858 target_string.dex_file,
3859 target_string.string_index));
3860 }
3861 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003862 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003863 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003864 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003865 info.offset_or_index));
3866 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003867 for (const auto& entry : boot_image_type_patches_) {
3868 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003869 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3870 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003871 target_type.dex_file,
3872 target_type.type_index));
3873 }
3874 for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003875 linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003876 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003877 info.pc_insn_label->GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003878 info.offset_or_index));
3879 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003880 for (const auto& entry : boot_image_address_patches_) {
3881 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003882 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3883 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003884 }
3885}
3886
Scott Wakeling97c72b72016-06-24 16:19:36 +01003887vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003888 Uint32ToLiteralMap* map) {
3889 return map->GetOrCreate(
3890 value,
3891 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3892}
3893
Scott Wakeling97c72b72016-06-24 16:19:36 +01003894vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003895 return uint64_literals_.GetOrCreate(
3896 value,
3897 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003898}
3899
Scott Wakeling97c72b72016-06-24 16:19:36 +01003900vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003901 MethodReference target_method,
3902 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003903 return map->GetOrCreate(
3904 target_method,
3905 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003906}
3907
Scott Wakeling97c72b72016-06-24 16:19:36 +01003908vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003909 MethodReference target_method) {
3910 return DeduplicateMethodLiteral(target_method, &method_patches_);
3911}
3912
Scott Wakeling97c72b72016-06-24 16:19:36 +01003913vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003914 MethodReference target_method) {
3915 return DeduplicateMethodLiteral(target_method, &call_patches_);
3916}
3917
3918
Andreas Gampe878d58c2015-01-15 23:24:00 -08003919void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003920 // Explicit clinit checks triggered by static invokes must have been pruned by
3921 // art::PrepareForRegisterAllocation.
3922 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003923
Andreas Gampe878d58c2015-01-15 23:24:00 -08003924 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3925 return;
3926 }
3927
Alexandre Ramesd921d642015-04-16 15:07:16 +01003928 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003929 LocationSummary* locations = invoke->GetLocations();
3930 codegen_->GenerateStaticOrDirectCall(
3931 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003932 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003933}
3934
3935void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003936 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3937 return;
3938 }
3939
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003940 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003941 DCHECK(!codegen_->IsLeafMethod());
3942 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3943}
3944
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003945HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
3946 HLoadClass::LoadKind desired_class_load_kind) {
3947 if (kEmitCompilerReadBarrier) {
3948 switch (desired_class_load_kind) {
3949 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3950 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3951 case HLoadClass::LoadKind::kBootImageAddress:
3952 // TODO: Implement for read barrier.
3953 return HLoadClass::LoadKind::kDexCacheViaMethod;
3954 default:
3955 break;
3956 }
3957 }
3958 switch (desired_class_load_kind) {
3959 case HLoadClass::LoadKind::kReferrersClass:
3960 break;
3961 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3962 DCHECK(!GetCompilerOptions().GetCompilePic());
3963 break;
3964 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3965 DCHECK(GetCompilerOptions().GetCompilePic());
3966 break;
3967 case HLoadClass::LoadKind::kBootImageAddress:
3968 break;
3969 case HLoadClass::LoadKind::kDexCacheAddress:
3970 DCHECK(Runtime::Current()->UseJitCompilation());
3971 break;
3972 case HLoadClass::LoadKind::kDexCachePcRelative:
3973 DCHECK(!Runtime::Current()->UseJitCompilation());
3974 break;
3975 case HLoadClass::LoadKind::kDexCacheViaMethod:
3976 break;
3977 }
3978 return desired_class_load_kind;
3979}
3980
Alexandre Rames67555f72014-11-18 10:55:16 +00003981void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003982 if (cls->NeedsAccessCheck()) {
3983 InvokeRuntimeCallingConvention calling_convention;
3984 CodeGenerator::CreateLoadClassLocationSummary(
3985 cls,
3986 LocationFrom(calling_convention.GetRegisterAt(0)),
Scott Wakeling97c72b72016-06-24 16:19:36 +01003987 LocationFrom(vixl::aarch64::x0),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003988 /* code_generator_supports_read_barrier */ true);
3989 return;
3990 }
3991
3992 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
3993 ? LocationSummary::kCallOnSlowPath
3994 : LocationSummary::kNoCall;
3995 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3996 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3997 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3998 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3999 locations->SetInAt(0, Location::RequiresRegister());
4000 }
4001 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004002}
4003
4004void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01004005 if (cls->NeedsAccessCheck()) {
4006 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
4007 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
4008 cls,
4009 cls->GetDexPc(),
4010 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004011 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01004012 return;
4013 }
4014
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004015 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004016 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004017
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004018 bool generate_null_check = false;
4019 switch (cls->GetLoadKind()) {
4020 case HLoadClass::LoadKind::kReferrersClass: {
4021 DCHECK(!cls->CanCallRuntime());
4022 DCHECK(!cls->MustGenerateClinitCheck());
4023 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4024 Register current_method = InputRegisterAt(cls, 0);
4025 GenerateGcRootFieldLoad(
4026 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4027 break;
4028 }
4029 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4030 DCHECK(!kEmitCompilerReadBarrier);
4031 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4032 cls->GetTypeIndex()));
4033 break;
4034 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
4035 DCHECK(!kEmitCompilerReadBarrier);
4036 // Add ADRP with its PC-relative type patch.
4037 const DexFile& dex_file = cls->GetDexFile();
4038 uint32_t type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004039 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004040 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004041 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004042 __ Bind(adrp_label);
4043 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004044 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004045 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004046 vixl::aarch64::Label* add_label =
4047 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004048 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004049 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004050 __ Bind(add_label);
4051 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004052 }
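      // Together the patched ADRP/ADD pair materializes the type's boot image
      // address: ADRP forms the 4KiB page, ADD fills in the low 12 bits.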
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004053 break;
4054 }
4055 case HLoadClass::LoadKind::kBootImageAddress: {
4056 DCHECK(!kEmitCompilerReadBarrier);
4057 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
4058 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4059 break;
4060 }
4061 case HLoadClass::LoadKind::kDexCacheAddress: {
4062 DCHECK_NE(cls->GetAddress(), 0u);
4063 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4064 // that gives a 16KiB range. To reduce the number of literals when we load
4065 // multiple types, simply split the dex cache address into a 16KiB-aligned base
4066 // loaded from a literal and the remaining offset embedded in the load.
4067 static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
4068 DCHECK_ALIGNED(cls->GetAddress(), 4u);
4069 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4070 uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4071 uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
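      // The address is thus decomposed as base_address + offset: only the base
      // needs a (deduplicated) literal, and the offset fits the scaled LDR
      // immediate used by the root load below.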
4072 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4073 // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
4074 GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset);
4075 generate_null_check = !cls->IsInDexCache();
4076 break;
4077 }
4078 case HLoadClass::LoadKind::kDexCachePcRelative: {
4079 // Add ADRP with its PC-relative DexCache access patch.
4080 const DexFile& dex_file = cls->GetDexFile();
4081 uint32_t element_offset = cls->GetDexCacheElementOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004082 vixl::aarch64::Label* adrp_label =
4083 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004084 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004085 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004086 __ Bind(adrp_label);
4087 __ adrp(out.X(), /* offset placeholder */ 0);
4088 }
4089 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004090 vixl::aarch64::Label* ldr_label =
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004091 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4092 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4093 GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4094 generate_null_check = !cls->IsInDexCache();
4095 break;
4096 }
4097 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4098 MemberOffset resolved_types_offset =
4099 ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
4100 // /* GcRoot<mirror::Class>[] */ out =
4101 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4102 Register current_method = InputRegisterAt(cls, 0);
4103 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
4104 // /* GcRoot<mirror::Class> */ out = out[type_index]
4105 GenerateGcRootFieldLoad(
4106 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
4107 generate_null_check = !cls->IsInDexCache();
4108 break;
4109 }
4110 }
4111
4112 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4113 DCHECK(cls->CanCallRuntime());
4114 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4115 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4116 codegen_->AddSlowPath(slow_path);
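    // If `out` is null the type is not yet in the dex cache and the slow path resolves it;
    // the same slow path also runs the static initializer when a clinit check is needed.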
4117 if (generate_null_check) {
4118 __ Cbz(out, slow_path->GetEntryLabel());
4119 }
4120 if (cls->MustGenerateClinitCheck()) {
4121 GenerateClassInitializationCheck(slow_path, out);
4122 } else {
4123 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004124 }
4125 }
4126}
4127
David Brazdilcb1c0552015-08-04 16:22:25 +01004128static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004129 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004130}
4131
Alexandre Rames67555f72014-11-18 10:55:16 +00004132void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4133 LocationSummary* locations =
4134 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4135 locations->SetOut(Location::RequiresRegister());
4136}
4137
4138void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004139 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4140}
4141
4142void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4143 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4144}
4145
4146void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4147 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004148}
4149
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004150HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4151 HLoadString::LoadKind desired_string_load_kind) {
4152 if (kEmitCompilerReadBarrier) {
4153 switch (desired_string_load_kind) {
4154 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4155 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4156 case HLoadString::LoadKind::kBootImageAddress:
4157 // TODO: Implement for read barrier.
4158 return HLoadString::LoadKind::kDexCacheViaMethod;
4159 default:
4160 break;
4161 }
4162 }
4163 switch (desired_string_load_kind) {
4164 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4165 DCHECK(!GetCompilerOptions().GetCompilePic());
4166 break;
4167 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4168 DCHECK(GetCompilerOptions().GetCompilePic());
4169 break;
4170 case HLoadString::LoadKind::kBootImageAddress:
4171 break;
4172 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004173 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004174 break;
4175 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004176 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004177 break;
4178 case HLoadString::LoadKind::kDexCacheViaMethod:
4179 break;
4180 }
4181 return desired_string_load_kind;
4182}
4183
Alexandre Rames67555f72014-11-18 10:55:16 +00004184void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004185 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004186 ? LocationSummary::kCallOnSlowPath
4187 : LocationSummary::kNoCall;
4188 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004189 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4190 locations->SetInAt(0, Location::RequiresRegister());
4191 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004192 locations->SetOut(Location::RequiresRegister());
4193}
4194
4195void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004196 Location out_loc = load->GetLocations()->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00004197 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004198
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004199 switch (load->GetLoadKind()) {
4200 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4201 DCHECK(!kEmitCompilerReadBarrier);
4202 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4203 load->GetStringIndex()));
4204 return; // No dex cache slow path.
4205 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4206 DCHECK(!kEmitCompilerReadBarrier);
4207 // Add ADRP with its PC-relative String patch.
4208 const DexFile& dex_file = load->GetDexFile();
4209 uint32_t string_index = load->GetStringIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004210 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004211 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004212 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004213 __ Bind(adrp_label);
4214 __ adrp(out.X(), /* offset placeholder */ 0);
4215 }
4216 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004217 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004218 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4219 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004220 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004221 __ Bind(add_label);
4222 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4223 }
4224 return; // No dex cache slow path.
4225 }
4226 case HLoadString::LoadKind::kBootImageAddress: {
4227 DCHECK(!kEmitCompilerReadBarrier);
4228 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4229 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4230 return; // No dex cache slow path.
4231 }
4232 case HLoadString::LoadKind::kDexCacheAddress: {
4233 DCHECK_NE(load->GetAddress(), 0u);
 4234       // The LDR immediate form has a 12-bit offset scaled by the access size, so for
 4235       // 32-bit loads that gives a 16KiB range. To reduce the number of literals when we
 4236       // load multiple strings, split the dex cache address into a 16KiB-aligned base
 4237       // loaded from a literal, with the remaining offset embedded in the load.
4238 static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
4239 DCHECK_ALIGNED(load->GetAddress(), 4u);
4240 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4241 uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4242 uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
4243 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004244 // /* GcRoot<mirror::String> */ out = *(base_address + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004245 GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
4246 break;
4247 }
4248 case HLoadString::LoadKind::kDexCachePcRelative: {
4249 // Add ADRP with its PC-relative DexCache access patch.
4250 const DexFile& dex_file = load->GetDexFile();
4251 uint32_t element_offset = load->GetDexCacheElementOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004252 vixl::aarch64::Label* adrp_label =
4253 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004254 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004255 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004256 __ Bind(adrp_label);
4257 __ adrp(out.X(), /* offset placeholder */ 0);
4258 }
4259 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004260 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004261 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004262 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004263 GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4264 break;
4265 }
4266 case HLoadString::LoadKind::kDexCacheViaMethod: {
4267 Register current_method = InputRegisterAt(load, 0);
4268 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4269 GenerateGcRootFieldLoad(
4270 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4271 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
4272 __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
4273 // /* GcRoot<mirror::String> */ out = out[string_index]
4274 GenerateGcRootFieldLoad(
4275 load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
4276 break;
4277 }
4278 default:
4279 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
4280 UNREACHABLE();
4281 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004282
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004283 if (!load->IsInDexCache()) {
4284 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4285 codegen_->AddSlowPath(slow_path);
4286 __ Cbz(out, slow_path->GetEntryLabel());
4287 __ Bind(slow_path->GetExitLabel());
4288 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004289}
4290
Alexandre Rames5319def2014-10-23 10:03:10 +01004291void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4292 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4293 locations->SetOut(Location::ConstantLocation(constant));
4294}
4295
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004296void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004297 // Will be generated at use site.
4298}
4299
Alexandre Rames67555f72014-11-18 10:55:16 +00004300void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4301 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004302 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004303 InvokeRuntimeCallingConvention calling_convention;
4304 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4305}
4306
4307void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4308 codegen_->InvokeRuntime(instruction->IsEnter()
4309 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
4310 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004311 instruction->GetDexPc(),
4312 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004313 if (instruction->IsEnter()) {
4314 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4315 } else {
4316 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4317 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004318}
4319
Alexandre Rames42d641b2014-10-27 14:00:51 +00004320void LocationsBuilderARM64::VisitMul(HMul* mul) {
4321 LocationSummary* locations =
4322 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4323 switch (mul->GetResultType()) {
4324 case Primitive::kPrimInt:
4325 case Primitive::kPrimLong:
4326 locations->SetInAt(0, Location::RequiresRegister());
4327 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004328 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004329 break;
4330
4331 case Primitive::kPrimFloat:
4332 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004333 locations->SetInAt(0, Location::RequiresFpuRegister());
4334 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004335 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004336 break;
4337
4338 default:
4339 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4340 }
4341}
4342
4343void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4344 switch (mul->GetResultType()) {
4345 case Primitive::kPrimInt:
4346 case Primitive::kPrimLong:
4347 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4348 break;
4349
4350 case Primitive::kPrimFloat:
4351 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004352 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004353 break;
4354
4355 default:
4356 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4357 }
4358}
4359
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004360void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4361 LocationSummary* locations =
4362 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4363 switch (neg->GetResultType()) {
4364 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004365 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004366 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004367 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004368 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004369
4370 case Primitive::kPrimFloat:
4371 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004372 locations->SetInAt(0, Location::RequiresFpuRegister());
4373 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004374 break;
4375
4376 default:
4377 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4378 }
4379}
4380
4381void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4382 switch (neg->GetResultType()) {
4383 case Primitive::kPrimInt:
4384 case Primitive::kPrimLong:
4385 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4386 break;
4387
4388 case Primitive::kPrimFloat:
4389 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004390 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004391 break;
4392
4393 default:
4394 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4395 }
4396}
4397
4398void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4399 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004400 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004401 InvokeRuntimeCallingConvention calling_convention;
4402 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004403 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004404 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004405 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004406}
4407
4408void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4409 LocationSummary* locations = instruction->GetLocations();
4410 InvokeRuntimeCallingConvention calling_convention;
4411 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4412 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004413 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004414  // Note: if heap poisoning is enabled, the entry point takes care
 4415  // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01004416 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4417 instruction,
4418 instruction->GetDexPc(),
4419 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004420 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004421}
4422
Alexandre Rames5319def2014-10-23 10:03:10 +01004423void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4424 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004425 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004426 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004427 if (instruction->IsStringAlloc()) {
4428 locations->AddTemp(LocationFrom(kArtMethodRegister));
4429 } else {
4430 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4431 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4432 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004433 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4434}
4435
4436void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004437  // Note: if heap poisoning is enabled, the entry point takes care
 4438  // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004439 if (instruction->IsStringAlloc()) {
4440 // String is allocated through StringFactory. Call NewEmptyString entry point.
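    // Rough shape of this sequence (the thread-local pNewEmptyString entry holds the
    // StringFactory method): load that method pointer, load its quick-compiled code entry
    // point, branch-and-link to it, then record the PC so the runtime can map the return
    // address back to this dex pc.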
4441 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004442 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004443 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4444 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4445 __ Blr(lr);
4446 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4447 } else {
4448 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4449 instruction,
4450 instruction->GetDexPc(),
4451 nullptr);
4452 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4453 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004454}
4455
4456void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4457 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004458 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004459 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004460}
4461
4462void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004463 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004464 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004465 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004466 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004467 break;
4468
4469 default:
4470 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4471 }
4472}
4473
David Brazdil66d126e2015-04-03 16:02:44 +01004474void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4475 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4476 locations->SetInAt(0, Location::RequiresRegister());
4477 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4478}
4479
4480void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004481 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01004482}
4483
Alexandre Rames5319def2014-10-23 10:03:10 +01004484void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004485 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4486 ? LocationSummary::kCallOnSlowPath
4487 : LocationSummary::kNoCall;
4488 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01004489 locations->SetInAt(0, Location::RequiresRegister());
4490 if (instruction->HasUses()) {
4491 locations->SetOut(Location::SameAsFirstInput());
4492 }
4493}
4494
Calin Juravle2ae48182016-03-16 14:05:09 +00004495void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4496 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004497 return;
4498 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004499
Alexandre Ramesd921d642015-04-16 15:07:16 +01004500 BlockPoolsScope block_pools(GetVIXLAssembler());
4501 Location obj = instruction->GetLocations()->InAt(0);
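  // Loading from offset 0 into wzr faults if `obj` is null; the runtime's fault handler
  // turns that fault into a NullPointerException, so recording the PC below is all the
  // bookkeeping this implicit check needs.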
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004502 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004503 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004504}
4505
Calin Juravle2ae48182016-03-16 14:05:09 +00004506void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004507 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004508 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004509
4510 LocationSummary* locations = instruction->GetLocations();
4511 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004512
4513 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004514}
4515
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004516void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004517 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004518}
4519
Alexandre Rames67555f72014-11-18 10:55:16 +00004520void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4521 HandleBinaryOp(instruction);
4522}
4523
4524void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4525 HandleBinaryOp(instruction);
4526}
4527
Alexandre Rames3e69f162014-12-10 10:36:50 +00004528void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4529 LOG(FATAL) << "Unreachable";
4530}
4531
4532void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4533 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4534}
4535
Alexandre Rames5319def2014-10-23 10:03:10 +01004536void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4537 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4538 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4539 if (location.IsStackSlot()) {
4540 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4541 } else if (location.IsDoubleStackSlot()) {
4542 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4543 }
4544 locations->SetOut(location);
4545}
4546
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004547void InstructionCodeGeneratorARM64::VisitParameterValue(
4548 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004549 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004550}
4551
4552void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4553 LocationSummary* locations =
4554 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004555 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004556}
4557
4558void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4559 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4560 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004561}
4562
4563void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4564 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004565 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004566 locations->SetInAt(i, Location::Any());
4567 }
4568 locations->SetOut(Location::Any());
4569}
4570
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004571void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004572 LOG(FATAL) << "Unreachable";
4573}
4574
Serban Constantinescu02164b32014-11-13 14:05:07 +00004575void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004576 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004577 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004578 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
4579 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004580 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4581
4582 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004583 case Primitive::kPrimInt:
4584 case Primitive::kPrimLong:
4585 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004586 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004587 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4588 break;
4589
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004590 case Primitive::kPrimFloat:
4591 case Primitive::kPrimDouble: {
4592 InvokeRuntimeCallingConvention calling_convention;
4593 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4594 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4595 locations->SetOut(calling_convention.GetReturnLocation(type));
4596
4597 break;
4598 }
4599
Serban Constantinescu02164b32014-11-13 14:05:07 +00004600 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004601 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004602 }
4603}
4604
4605void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4606 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004607
Serban Constantinescu02164b32014-11-13 14:05:07 +00004608 switch (type) {
4609 case Primitive::kPrimInt:
4610 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004611 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004612 break;
4613 }
4614
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004615 case Primitive::kPrimFloat:
4616 case Primitive::kPrimDouble: {
4617 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
4618 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004619 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004620 if (type == Primitive::kPrimFloat) {
4621 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4622 } else {
4623 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4624 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004625 break;
4626 }
4627
Serban Constantinescu02164b32014-11-13 14:05:07 +00004628 default:
4629 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004630 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004631 }
4632}
4633
Calin Juravle27df7582015-04-17 19:12:31 +01004634void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4635 memory_barrier->SetLocations(nullptr);
4636}
4637
4638void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004639 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004640}
4641
Alexandre Rames5319def2014-10-23 10:03:10 +01004642void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4643 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4644 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004645 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004646}
4647
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004648void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004649 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004650}
4651
4652void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4653 instruction->SetLocations(nullptr);
4654}
4655
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004656void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004657 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004658}
4659
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004660void LocationsBuilderARM64::VisitRor(HRor* ror) {
4661 HandleBinaryOp(ror);
4662}
4663
4664void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4665 HandleBinaryOp(ror);
4666}
4667
Serban Constantinescu02164b32014-11-13 14:05:07 +00004668void LocationsBuilderARM64::VisitShl(HShl* shl) {
4669 HandleShift(shl);
4670}
4671
4672void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4673 HandleShift(shl);
4674}
4675
4676void LocationsBuilderARM64::VisitShr(HShr* shr) {
4677 HandleShift(shr);
4678}
4679
4680void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4681 HandleShift(shr);
4682}
4683
Alexandre Rames5319def2014-10-23 10:03:10 +01004684void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004685 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004686}
4687
4688void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004689 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004690}
4691
Alexandre Rames67555f72014-11-18 10:55:16 +00004692void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004693 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004694}
4695
4696void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004697 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004698}
4699
4700void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004701 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004702}
4703
Alexandre Rames67555f72014-11-18 10:55:16 +00004704void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004705 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004706}
4707
Calin Juravlee460d1d2015-09-29 04:52:17 +01004708void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4709 HUnresolvedInstanceFieldGet* instruction) {
4710 FieldAccessCallingConventionARM64 calling_convention;
4711 codegen_->CreateUnresolvedFieldLocationSummary(
4712 instruction, instruction->GetFieldType(), calling_convention);
4713}
4714
4715void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4716 HUnresolvedInstanceFieldGet* instruction) {
4717 FieldAccessCallingConventionARM64 calling_convention;
4718 codegen_->GenerateUnresolvedFieldAccess(instruction,
4719 instruction->GetFieldType(),
4720 instruction->GetFieldIndex(),
4721 instruction->GetDexPc(),
4722 calling_convention);
4723}
4724
4725void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4726 HUnresolvedInstanceFieldSet* instruction) {
4727 FieldAccessCallingConventionARM64 calling_convention;
4728 codegen_->CreateUnresolvedFieldLocationSummary(
4729 instruction, instruction->GetFieldType(), calling_convention);
4730}
4731
4732void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4733 HUnresolvedInstanceFieldSet* instruction) {
4734 FieldAccessCallingConventionARM64 calling_convention;
4735 codegen_->GenerateUnresolvedFieldAccess(instruction,
4736 instruction->GetFieldType(),
4737 instruction->GetFieldIndex(),
4738 instruction->GetDexPc(),
4739 calling_convention);
4740}
4741
4742void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4743 HUnresolvedStaticFieldGet* instruction) {
4744 FieldAccessCallingConventionARM64 calling_convention;
4745 codegen_->CreateUnresolvedFieldLocationSummary(
4746 instruction, instruction->GetFieldType(), calling_convention);
4747}
4748
4749void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4750 HUnresolvedStaticFieldGet* instruction) {
4751 FieldAccessCallingConventionARM64 calling_convention;
4752 codegen_->GenerateUnresolvedFieldAccess(instruction,
4753 instruction->GetFieldType(),
4754 instruction->GetFieldIndex(),
4755 instruction->GetDexPc(),
4756 calling_convention);
4757}
4758
4759void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4760 HUnresolvedStaticFieldSet* instruction) {
4761 FieldAccessCallingConventionARM64 calling_convention;
4762 codegen_->CreateUnresolvedFieldLocationSummary(
4763 instruction, instruction->GetFieldType(), calling_convention);
4764}
4765
4766void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4767 HUnresolvedStaticFieldSet* instruction) {
4768 FieldAccessCallingConventionARM64 calling_convention;
4769 codegen_->GenerateUnresolvedFieldAccess(instruction,
4770 instruction->GetFieldType(),
4771 instruction->GetFieldIndex(),
4772 instruction->GetDexPc(),
4773 calling_convention);
4774}
4775
Alexandre Rames5319def2014-10-23 10:03:10 +01004776void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
4777 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4778}
4779
4780void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004781 HBasicBlock* block = instruction->GetBlock();
4782 if (block->GetLoopInformation() != nullptr) {
4783 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4784 // The back edge will generate the suspend check.
4785 return;
4786 }
4787 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4788 // The goto will generate the suspend check.
4789 return;
4790 }
4791 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004792}
4793
Alexandre Rames67555f72014-11-18 10:55:16 +00004794void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4795 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004796 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004797 InvokeRuntimeCallingConvention calling_convention;
4798 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4799}
4800
4801void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
4802 codegen_->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004803 QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004804 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004805}
4806
4807void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4808 LocationSummary* locations =
4809 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4810 Primitive::Type input_type = conversion->GetInputType();
4811 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004812 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004813 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4814 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4815 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4816 }
4817
Alexandre Rames542361f2015-01-29 16:57:31 +00004818 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004819 locations->SetInAt(0, Location::RequiresFpuRegister());
4820 } else {
4821 locations->SetInAt(0, Location::RequiresRegister());
4822 }
4823
Alexandre Rames542361f2015-01-29 16:57:31 +00004824 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004825 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4826 } else {
4827 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4828 }
4829}
4830
4831void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4832 Primitive::Type result_type = conversion->GetResultType();
4833 Primitive::Type input_type = conversion->GetInputType();
4834
4835 DCHECK_NE(input_type, result_type);
4836
Alexandre Rames542361f2015-01-29 16:57:31 +00004837 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004838 int result_size = Primitive::ComponentSize(result_type);
4839 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004840 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004841 Register output = OutputRegister(conversion);
4842 Register source = InputRegisterAt(conversion, 0);
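    // In the integral cases below, long-to-int truncation is a plain register move, char
    // (the only unsigned type here) is zero-extended with Ubfx, and everything else is
    // sign-extended with Sbfx over the narrower of the two sizes.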
Alexandre Rames8626b742015-11-25 16:28:08 +00004843 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004844 // 'int' values are used directly as W registers, discarding the top
4845 // bits, so we don't need to sign-extend and can just perform a move.
4846 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
4847 // top 32 bits of the target register. We theoretically could leave those
4848 // bits unchanged, but we would have to make sure that no code uses a
4849 // 32bit input value as a 64bit value assuming that the top 32 bits are
 4850       // 32-bit input value as a 64-bit value assuming that the top 32 bits are
4851 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004852 } else if (result_type == Primitive::kPrimChar ||
4853 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4854 __ Ubfx(output,
4855 output.IsX() ? source.X() : source.W(),
4856 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004857 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00004858 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004859 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004860 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004861 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004862 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004863 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4864 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004865 } else if (Primitive::IsFloatingPointType(result_type) &&
4866 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004867 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4868 } else {
4869 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4870 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004871 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004872}
Alexandre Rames67555f72014-11-18 10:55:16 +00004873
Serban Constantinescu02164b32014-11-13 14:05:07 +00004874void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4875 HandleShift(ushr);
4876}
4877
4878void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4879 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004880}
4881
4882void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4883 HandleBinaryOp(instruction);
4884}
4885
4886void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4887 HandleBinaryOp(instruction);
4888}
4889
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004890void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004891 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004892 LOG(FATAL) << "Unreachable";
4893}
4894
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004895void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004896 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004897 LOG(FATAL) << "Unreachable";
4898}
4899
Mark Mendellfe57faa2015-09-18 09:26:15 -04004900// Simple implementation of packed switch - generate cascaded compare/jumps.
4901void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4902 LocationSummary* locations =
4903 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4904 locations->SetInAt(0, Location::RequiresRegister());
4905}
4906
4907void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4908 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004909 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004910 Register value_reg = InputRegisterAt(switch_instr, 0);
4911 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4912
Zheng Xu3927c8b2015-11-18 17:46:25 +08004913  // Roughly assume an average of at most 16 assembly instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004914 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08004915  // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the graph to
4916 // make sure we don't emit it if the target may run out of range.
4917 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4918 // ranges and emit the tables only as required.
 4919   static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
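  // With 4-byte A64 instructions this works out to 1 MiB / 64 bytes = 16384 HIRs, a
  // conservative bound that keeps every emitted ADR within range of its jump table.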
Mark Mendellfe57faa2015-09-18 09:26:15 -04004920
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004921 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004922 // Current instruction id is an upper bound of the number of HIRs in the graph.
4923 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4924 // Create a series of compare/jumps.
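    // Each loop iteration below folds two consecutive case values into one SUBS: after
    // subtracting 2, 'lo' catches the first value of the pair and 'eq' the second.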
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004925 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4926 Register temp = temps.AcquireW();
4927 __ Subs(temp, value_reg, Operand(lower_bound));
4928
Zheng Xu3927c8b2015-11-18 17:46:25 +08004929 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004930 // Jump to successors[0] if value == lower_bound.
4931 __ B(eq, codegen_->GetLabelOf(successors[0]));
4932 int32_t last_index = 0;
4933 for (; num_entries - last_index > 2; last_index += 2) {
4934 __ Subs(temp, temp, Operand(2));
4935 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4936 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4937 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4938 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4939 }
4940 if (num_entries - last_index == 2) {
4941 // The last missing case_value.
4942 __ Cmp(temp, Operand(1));
4943 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004944 }
4945
4946 // And the default for any other value.
4947 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4948 __ B(codegen_->GetLabelOf(default_block));
4949 }
4950 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01004951 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08004952
4953 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4954
 4955     // The instructions below should use at most one blocked register. Since there are two blocked
 4956     // registers, we are free to block one of them.
4957 Register temp_w = temps.AcquireW();
4958 Register index;
4959 // Remove the bias.
4960 if (lower_bound != 0) {
4961 index = temp_w;
4962 __ Sub(index, value_reg, Operand(lower_bound));
4963 } else {
4964 index = value_reg;
4965 }
4966
4967 // Jump to default block if index is out of the range.
4968 __ Cmp(index, Operand(num_entries));
4969 __ B(hs, codegen_->GetLabelOf(default_block));
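    // The unsigned 'hs' test also rejects inputs below lower_bound: after the bias is
    // removed they read as large unsigned indices.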
4970
 4971     // In the current VIXL implementation, encoding the immediate value for Adr does not require
 4972     // any blocked registers, so we are free to use both VIXL blocked registers to reduce
 4973     // register pressure.
4974 Register table_base = temps.AcquireX();
4975 // Load jump offset from the table.
4976 __ Adr(table_base, jump_table->GetTableStartLabel());
4977 Register jump_offset = temp_w;
4978 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
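    // Each table entry is a 32-bit signed offset from the table start, hence the UXTW #2
    // scaling above and the SXTW extension below when forming the target address.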
4979
 4980     // Jump to the target block by branching to table_base (PC-relative) + offset.
4981 Register target_address = table_base;
4982 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
4983 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04004984 }
4985}
4986
Roland Levillain44015862016-01-22 11:47:17 +00004987void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
4988 Location out,
4989 uint32_t offset,
4990 Location maybe_temp) {
4991 Primitive::Type type = Primitive::kPrimNot;
4992 Register out_reg = RegisterFrom(out, type);
4993 if (kEmitCompilerReadBarrier) {
4994 Register temp_reg = RegisterFrom(maybe_temp, type);
4995 if (kUseBakerReadBarrier) {
4996 // Load with fast path based Baker's read barrier.
4997 // /* HeapReference<Object> */ out = *(out + offset)
4998 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4999 out,
5000 out_reg,
5001 offset,
5002 temp_reg,
5003 /* needs_null_check */ false,
5004 /* use_load_acquire */ false);
5005 } else {
5006 // Load with slow path based read barrier.
5007 // Save the value of `out` into `maybe_temp` before overwriting it
5008 // in the following move operation, as we will need it for the
5009 // read barrier below.
5010 __ Mov(temp_reg, out_reg);
5011 // /* HeapReference<Object> */ out = *(out + offset)
5012 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5013 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5014 }
5015 } else {
5016 // Plain load with no read barrier.
5017 // /* HeapReference<Object> */ out = *(out + offset)
5018 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5019 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5020 }
5021}
5022
5023void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
5024 Location out,
5025 Location obj,
5026 uint32_t offset,
5027 Location maybe_temp) {
5028 Primitive::Type type = Primitive::kPrimNot;
5029 Register out_reg = RegisterFrom(out, type);
5030 Register obj_reg = RegisterFrom(obj, type);
5031 if (kEmitCompilerReadBarrier) {
5032 if (kUseBakerReadBarrier) {
5033 // Load with fast path based Baker's read barrier.
5034 Register temp_reg = RegisterFrom(maybe_temp, type);
5035 // /* HeapReference<Object> */ out = *(obj + offset)
5036 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5037 out,
5038 obj_reg,
5039 offset,
5040 temp_reg,
5041 /* needs_null_check */ false,
5042 /* use_load_acquire */ false);
5043 } else {
5044 // Load with slow path based read barrier.
5045 // /* HeapReference<Object> */ out = *(obj + offset)
5046 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5047 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5048 }
5049 } else {
5050 // Plain load with no read barrier.
5051 // /* HeapReference<Object> */ out = *(obj + offset)
5052 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5053 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5054 }
5055}
5056
5057void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
5058 Location root,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005059 Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005060 uint32_t offset,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005061 vixl::aarch64::Label* fixup_label) {
Roland Levillain44015862016-01-22 11:47:17 +00005062 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
5063 if (kEmitCompilerReadBarrier) {
5064 if (kUseBakerReadBarrier) {
5065 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
 5066       // Baker's read barriers are used:
5067 //
5068 // root = obj.field;
5069 // if (Thread::Current()->GetIsGcMarking()) {
5070 // root = ReadBarrier::Mark(root)
5071 // }
5072
5073 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005074 if (fixup_label == nullptr) {
5075 __ Ldr(root_reg, MemOperand(obj, offset));
5076 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005077 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005078 __ Bind(fixup_label);
5079 __ ldr(root_reg, MemOperand(obj, offset));
5080 }
Roland Levillain44015862016-01-22 11:47:17 +00005081 static_assert(
5082 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5083 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5084 "have different sizes.");
5085 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5086 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5087 "have different sizes.");
5088
5089 // Slow path used to mark the GC root `root`.
5090 SlowPathCodeARM64* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01005091 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
Roland Levillain44015862016-01-22 11:47:17 +00005092 codegen_->AddSlowPath(slow_path);
5093
5094 MacroAssembler* masm = GetVIXLAssembler();
5095 UseScratchRegisterScope temps(masm);
5096 Register temp = temps.AcquireW();
5097 // temp = Thread::Current()->GetIsGcMarking()
Andreas Gampe542451c2016-07-26 09:02:02 -07005098 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00005099 __ Cbnz(temp, slow_path->GetEntryLabel());
5100 __ Bind(slow_path->GetExitLabel());
5101 } else {
5102 // GC root loaded through a slow path for read barriers other
5103 // than Baker's.
5104 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005105 if (fixup_label == nullptr) {
5106 __ Add(root_reg.X(), obj.X(), offset);
5107 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005108 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005109 __ Bind(fixup_label);
5110 __ add(root_reg.X(), obj.X(), offset);
5111 }
Roland Levillain44015862016-01-22 11:47:17 +00005112 // /* mirror::Object* */ root = root->Read()
5113 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5114 }
5115 } else {
5116 // Plain GC root load with no read barrier.
5117 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005118 if (fixup_label == nullptr) {
5119 __ Ldr(root_reg, MemOperand(obj, offset));
5120 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005121 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005122 __ Bind(fixup_label);
5123 __ ldr(root_reg, MemOperand(obj, offset));
5124 }
Roland Levillain44015862016-01-22 11:47:17 +00005125 // Note that GC roots are not affected by heap poisoning, thus we
5126 // do not have to unpoison `root_reg` here.
5127 }
5128}
5129
5130void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5131 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005132 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005133 uint32_t offset,
5134 Register temp,
5135 bool needs_null_check,
5136 bool use_load_acquire) {
5137 DCHECK(kEmitCompilerReadBarrier);
5138 DCHECK(kUseBakerReadBarrier);
5139
5140 // /* HeapReference<Object> */ ref = *(obj + offset)
5141 Location no_index = Location::NoLocation();
Roland Levillainbfea3352016-06-23 13:48:47 +01005142 size_t no_scale_factor = 0U;
5143 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5144 ref,
5145 obj,
5146 offset,
5147 no_index,
5148 no_scale_factor,
5149 temp,
5150 needs_null_check,
5151 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005152}
5153
5154void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5155 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005156 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005157 uint32_t data_offset,
5158 Location index,
5159 Register temp,
5160 bool needs_null_check) {
5161 DCHECK(kEmitCompilerReadBarrier);
5162 DCHECK(kUseBakerReadBarrier);
5163
 5164   // Array elements are never volatile fields, therefore array loads
 5165   // never use Load-Acquire instructions on ARM64.
5166 const bool use_load_acquire = false;
5167
Roland Levillainbfea3352016-06-23 13:48:47 +01005168 static_assert(
5169 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5170 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005171 // /* HeapReference<Object> */ ref =
5172 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005173 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5174 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5175 ref,
5176 obj,
5177 data_offset,
5178 index,
5179 scale_factor,
5180 temp,
5181 needs_null_check,
5182 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005183}
5184
5185void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5186 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005187 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005188 uint32_t offset,
5189 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005190 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005191 Register temp,
5192 bool needs_null_check,
5193 bool use_load_acquire) {
5194 DCHECK(kEmitCompilerReadBarrier);
5195 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005196 // If we are emitting an array load, we should not be using a
5197 // Load Acquire instruction. In other words:
5198 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5199 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005200
5201 MacroAssembler* masm = GetVIXLAssembler();
5202 UseScratchRegisterScope temps(masm);
5203
5204 // In slow path based read barriers, the read barrier call is
5205 // inserted after the original load. However, in fast path based
5206 // Baker's read barriers, we need to perform the load of
5207 // mirror::Object::monitor_ *before* the original reference load.
5208 // This load-load ordering is required by the read barrier.
5209 // The fast path/slow path (for Baker's algorithm) should look like:
5210 //
  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
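  // The monitor load above doubles as the implicit null check on `obj`,
  // so the reference load below does not need to record another one.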
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // obj is unchanged by this operation, but its value now depends on temp.
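  // (The 32-bit Ldr above zero-extends into temp.X(), so the shifted
  // addend temp.X() >> 32 is always zero.)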
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK(!needs_null_check);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        Register temp2 = temps.AcquireW();
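        // A MemOperand cannot encode both an immediate offset and a
        // scaled register index, so fold the offset into a temporary
        // base register first.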
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
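  // Gray (0b01) is the only state with the low bit set, so testing that
  // single bit is equivalent to comparing rb_state against gray_ptr_.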
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

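  // Unlike the Baker read barrier fast path, this barrier is taken
  // unconditionally: branch to the slow path and resume at its exit label.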
  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
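    // The vtable is embedded in the class object, so a single load at the
    // embedded entry offset yields the ArtMethod pointer.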
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
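    // Two dependent loads: first the ImTable pointer out of the class,
    // then the method entry at `method_offset` within that table.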
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art