/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
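// For example, right at this threshold of 7 entries the compare/jump sequence costs roughly
// 1.5 * 7 + 3 ~= 14 instructions, while the jump table costs 7 instructions plus 7 int32
// literals, which is why only switches with at least this many entries use the jump table
// path (see JumpTableARM64::EmitTable below).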

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
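// For example: with gt_bias (NaN biased toward "greater"), kCondLT maps to `cc`, which reads
// false when the FCMP result is unordered, so a NaN operand does not take the "less than"
// branch; without gt_bias the same condition maps to `lt`, which reads true for unordered
// operands, biasing NaN toward "less than".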

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()
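// With the macro above, the slow paths below can write e.g. `__ Bind(GetEntryLabel());`, which
// expands to `down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->Bind(...)`; the
// macro is #undef'd further down and redefined in terms of the enclosing code generator's own
// assembler once the CodeGeneratorARM64 member functions begin.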
Alexandre Rames5319def2014-10-23 10:03:10 +0100139
Zheng Xuda403092015-04-24 17:35:39 +0800140// Calculate memory accessing operand for save/restore live registers.
141static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
142 RegisterSet* register_set,
143 int64_t spill_offset,
144 bool is_save) {
145 DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
146 codegen->GetNumberOfCoreRegisters(),
147 register_set->GetFloatingPointRegisters(),
148 codegen->GetNumberOfFloatingPointRegisters()));
149
150 CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
Scott Wakeling97c72b72016-06-24 16:19:36 +0100151 register_set->GetCoreRegisters() & (~callee_saved_core_registers.GetList()));
Nicolas Geoffray75d5b9b2015-10-05 07:40:35 +0000152 CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
Scott Wakeling97c72b72016-06-24 16:19:36 +0100153 register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.GetList()));
Zheng Xuda403092015-04-24 17:35:39 +0800154
155 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
156 UseScratchRegisterScope temps(masm);
157
158 Register base = masm->StackPointer();
Scott Wakeling97c72b72016-06-24 16:19:36 +0100159 int64_t core_spill_size = core_list.GetTotalSizeInBytes();
160 int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
Zheng Xuda403092015-04-24 17:35:39 +0800161 int64_t reg_size = kXRegSizeInBytes;
162 int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
163 uint32_t ls_access_size = WhichPowerOf2(reg_size);
Scott Wakeling97c72b72016-06-24 16:19:36 +0100164 if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
Zheng Xuda403092015-04-24 17:35:39 +0800165 !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating-point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
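// A sketch of the resulting layout (assuming no offset adjustment was needed above): the live
// caller-save core registers are stored as a contiguous block at `spill_offset` from the stack
// pointer, immediately followed by the live caller-save FP registers at
// `spill_offset + core_spill_size`.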

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckCast, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
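// The emitted table is simply `num_entries` consecutive int32 literals, each holding the signed
// byte offset from `table_start_` to the corresponding successor block's label; the dispatch
// sequence that loads an entry and branches through it is generated separately, when the packed
// switch instruction itself is visited.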

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
      : SlowPathCodeARM64(instruction), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(obj_.reg(), LR);
    DCHECK_NE(obj_.reg(), WSP);
    DCHECK_NE(obj_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, so it cannot be the entry point's input/output.
    DCHECK_NE(obj_.reg(), IP0);
    DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- obj
    //   W0 <- ReadBarrierMark(W0)
    //   obj <- W0
    //
    // we just use rX (the register holding `obj`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(obj_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ B(GetExitLabel());
  }

 private:
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};
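// As a concrete example of the convention above: if `obj_` lives in w2, the call goes through
// the thread-local ReadBarrierMarkRegX entry point slot for X == 2, with w2 serving as both the
// input reference and the (possibly updated) output reference, so no argument or result moves
// are needed around the call.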

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
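// A sketch of the rules above: floating-point arguments consume FP argument registers while any
// remain, other arguments consume core argument registers while any remain, and everything left
// over is passed on the stack, with 64-bit values taking a double stack slot; every argument
// advances `stack_index_` (by two for 64-bit values) because stack space is reserved for all
// arguments regardless of where they are actually passed.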

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
  // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}
1008
Alexandre Rames3e69f162014-12-10 10:36:50 +00001009void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001010 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001011 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001012}
1013
Alexandre Rames5319def2014-10-23 10:03:10 +01001014void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001015 MacroAssembler* masm = GetVIXLAssembler();
1016 BlockPoolsScope block_pools(masm);
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001017 __ Bind(&frame_entry_label_);
1018
Serban Constantinescu02164b32014-11-13 14:05:07 +00001019 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1020 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001021 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001022 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001023 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001024 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001025 __ Ldr(wzr, MemOperand(temp, 0));
1026 RecordPcInfo(nullptr, 0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001027 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001028
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001029 if (!HasEmptyFrame()) {
1030 int frame_size = GetFrameSize();
1031 // Stack layout:
1032 // sp[frame_size - 8] : lr.
1033 // ... : other preserved core registers.
1034 // ... : other preserved fp registers.
1035 // ... : reserved frame space.
1036 // sp[0] : current method.
1037 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001038 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001039 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1040 frame_size - GetCoreSpillSize());
1041 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1042 frame_size - FrameEntrySpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001043 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001044}
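// Illustrative sketch (not part of the original file): how the spill offsets used above
// follow from the stack layout comment. The struct and function names are invented for
// the example; only the arithmetic mirrors the code.
#include <cstdint>

struct FrameLayout {
  uint32_t method_offset;       // sp[0]: the current ArtMethod*.
  uint32_t fp_spills_offset;    // Start of the preserved FP registers.
  uint32_t core_spills_offset;  // Start of the preserved core registers (lr saved last).
};

FrameLayout ComputeFrameLayout(uint32_t frame_size,
                               uint32_t core_spill_size,
                               uint32_t fp_spill_size) {
  FrameLayout layout;
  layout.method_offset = 0u;
  layout.core_spills_offset = frame_size - core_spill_size;                // Top of the frame.
  layout.fp_spills_offset = frame_size - core_spill_size - fp_spill_size;  // Just below them.
  return layout;
}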
1045
1046void CodeGeneratorARM64::GenerateFrameExit() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001047 BlockPoolsScope block_pools(GetVIXLAssembler());
David Srbeckyc34dc932015-04-12 09:27:43 +01001048 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001049 if (!HasEmptyFrame()) {
1050 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001051 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1052 frame_size - FrameEntrySpillSize());
1053 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1054 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001055 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001056 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001057 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001058 __ Ret();
1059 GetAssembler()->cfi().RestoreState();
1060 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001061}
1062
Scott Wakeling97c72b72016-06-24 16:19:36 +01001063CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001064 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001065 return CPURegList(CPURegister::kRegister, kXRegSize,
1066 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001067}
1068
Scott Wakeling97c72b72016-06-24 16:19:36 +01001069CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001070 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1071 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001072 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1073 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001074}
1075
Alexandre Rames5319def2014-10-23 10:03:10 +01001076void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1077 __ Bind(GetLabelOf(block));
1078}
1079
Calin Juravle175dc732015-08-25 15:42:32 +01001080void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1081 DCHECK(location.IsRegister());
1082 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1083}
1084
Calin Juravlee460d1d2015-09-29 04:52:17 +01001085void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1086 if (location.IsRegister()) {
1087 locations->AddTemp(location);
1088 } else {
1089 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1090 }
1091}
1092
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001093void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001094 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001095 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001096 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001097 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001098 if (value_can_be_null) {
1099 __ Cbz(value, &done);
1100 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001101 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001102 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001103 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001104 if (value_can_be_null) {
1105 __ Bind(&done);
1106 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001107}
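// Illustrative sketch (not part of the original file): the card marking arithmetic
// performed above, as plain C++. The card shift value is an assumption for the example;
// the stored byte is the low byte of the card table base, matching the Strb above.
#include <cstdint>

void MarkCard(uint8_t* card_table_base, uintptr_t object_address) {
  constexpr uintptr_t kCardShift = 10;  // Assumed card granularity, for illustration only.
  uint8_t* card = card_table_base + (object_address >> kCardShift);
  *card = static_cast<uint8_t>(reinterpret_cast<uintptr_t>(card_table_base));
}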
1108
David Brazdil58282f42016-01-14 12:45:10 +00001109void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001110 // Blocked core registers:
1111 // lr : Runtime reserved.
1112 // tr : Runtime reserved.
1113 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1114 // ip1 : VIXL core temp.
1115 // ip0 : VIXL core temp.
1116 //
1117 // Blocked fp registers:
1118 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001119 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1120 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001121 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001122 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001123 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001124
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001125 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001126 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001127 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001128 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001129
David Brazdil58282f42016-01-14 12:45:10 +00001130 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001131 // Stubs do not save callee-save floating point registers. If the graph
1132 // is debuggable, we need to deal with these registers differently. For
1133 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001134 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1135 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001136 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001137 }
1138 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001139}
1140
Alexandre Rames3e69f162014-12-10 10:36:50 +00001141size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1142 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1143 __ Str(reg, MemOperand(sp, stack_index));
1144 return kArm64WordSize;
1145}
1146
1147size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1148 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1149 __ Ldr(reg, MemOperand(sp, stack_index));
1150 return kArm64WordSize;
1151}
1152
1153size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1154 FPRegister reg = FPRegister(reg_id, kDRegSize);
1155 __ Str(reg, MemOperand(sp, stack_index));
1156 return kArm64WordSize;
1157}
1158
1159size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1160 FPRegister reg = FPRegister(reg_id, kDRegSize);
1161 __ Ldr(reg, MemOperand(sp, stack_index));
1162 return kArm64WordSize;
1163}
1164
Alexandre Rames5319def2014-10-23 10:03:10 +01001165void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001166 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001167}
1168
1169void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001170 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001171}
1172
Alexandre Rames67555f72014-11-18 10:55:16 +00001173void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001174 if (constant->IsIntConstant()) {
1175 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1176 } else if (constant->IsLongConstant()) {
1177 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1178 } else if (constant->IsNullConstant()) {
1179 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001180 } else if (constant->IsFloatConstant()) {
1181 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1182 } else {
1183 DCHECK(constant->IsDoubleConstant());
1184 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1185 }
1186}
1187
Alexandre Rames3e69f162014-12-10 10:36:50 +00001188
1189static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1190 DCHECK(constant.IsConstant());
1191 HConstant* cst = constant.GetConstant();
1192 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001193 // Null is mapped to a core W register, which we associate with kPrimInt.
1194 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001195 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1196 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1197 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1198}
1199
Calin Juravlee460d1d2015-09-29 04:52:17 +01001200void CodeGeneratorARM64::MoveLocation(Location destination,
1201 Location source,
1202 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001203 if (source.Equals(destination)) {
1204 return;
1205 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001206
1207 // A valid move can always be inferred from the destination and source
1208 // locations. When moving from and to a register, the argument type can be
1209 // used to generate 32bit instead of 64bit moves. In debug mode we also
 1210  // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001211 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001212
1213 if (destination.IsRegister() || destination.IsFpuRegister()) {
1214 if (unspecified_type) {
1215 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1216 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001217 (src_cst != nullptr && (src_cst->IsIntConstant()
1218 || src_cst->IsFloatConstant()
1219 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001220        // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001221 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001222 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001223 // If the source is a double stack slot or a 64bit constant, a 64bit
1224 // type is appropriate. Else the source is a register, and since the
 1225        // type has not been specified, we choose a 64bit type to force a 64bit
1226 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001227 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001228 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001229 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001230 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1231 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1232 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001233 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1234 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1235 __ Ldr(dst, StackOperandFrom(source));
1236 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001237 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001238 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001239 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001240 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001241 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001242 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001243 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001244 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1245 ? Primitive::kPrimLong
1246 : Primitive::kPrimInt;
1247 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1248 }
1249 } else {
1250 DCHECK(source.IsFpuRegister());
1251 if (destination.IsRegister()) {
1252 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1253 ? Primitive::kPrimDouble
1254 : Primitive::kPrimFloat;
1255 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1256 } else {
1257 DCHECK(destination.IsFpuRegister());
1258 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001259 }
1260 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001261 } else { // The destination is not a register. It must be a stack slot.
1262 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1263 if (source.IsRegister() || source.IsFpuRegister()) {
1264 if (unspecified_type) {
1265 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001266 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001267 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001268 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001269 }
1270 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001271 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1272 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1273 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001274 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001275 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1276 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001277 UseScratchRegisterScope temps(GetVIXLAssembler());
1278 HConstant* src_cst = source.GetConstant();
1279 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001280 if (src_cst->IsZeroBitPattern()) {
1281 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant()) ? xzr : wzr;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001282 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001283 if (src_cst->IsIntConstant()) {
1284 temp = temps.AcquireW();
1285 } else if (src_cst->IsLongConstant()) {
1286 temp = temps.AcquireX();
1287 } else if (src_cst->IsFloatConstant()) {
1288 temp = temps.AcquireS();
1289 } else {
1290 DCHECK(src_cst->IsDoubleConstant());
1291 temp = temps.AcquireD();
1292 }
1293 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001294 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001295 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001296 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001297 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001298 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001299 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001300 // There is generally less pressure on FP registers.
1301 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001302 __ Ldr(temp, StackOperandFrom(source));
1303 __ Str(temp, StackOperandFrom(destination));
1304 }
1305 }
1306}
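// Illustrative sketch (not part of the original file): a GPR <-> FPR move with an
// unspecified type is a raw bit move, which is what the Fmov used above provides.
// Plain C++ equivalent using a bit copy; names are invented for the example.
#include <cstdint>
#include <cstring>

double CoreToFpBits(uint64_t core_bits) {
  double fp_bits;
  static_assert(sizeof(fp_bits) == sizeof(core_bits), "both sides are 64-bit");
  std::memcpy(&fp_bits, &core_bits, sizeof(fp_bits));  // No numeric conversion, just bits.
  return fp_bits;
}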
1307
1308void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001309 CPURegister dst,
1310 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001311 switch (type) {
1312 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001313 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001314 break;
1315 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001316 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001317 break;
1318 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001319 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001320 break;
1321 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001322 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001323 break;
1324 case Primitive::kPrimInt:
1325 case Primitive::kPrimNot:
1326 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001327 case Primitive::kPrimFloat:
1328 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001329 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001330 __ Ldr(dst, src);
1331 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001332 case Primitive::kPrimVoid:
1333 LOG(FATAL) << "Unreachable type " << type;
1334 }
1335}
1336
Calin Juravle77520bc2015-01-12 18:45:46 +00001337void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001338 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001339 const MemOperand& src,
1340 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001341 MacroAssembler* masm = GetVIXLAssembler();
1342 BlockPoolsScope block_pools(masm);
1343 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001344 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001345 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001346
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001347 DCHECK(!src.IsPreIndex());
1348 DCHECK(!src.IsPostIndex());
1349
1350 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001351 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001352 MemOperand base = MemOperand(temp_base);
1353 switch (type) {
1354 case Primitive::kPrimBoolean:
1355 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001356 if (needs_null_check) {
1357 MaybeRecordImplicitNullCheck(instruction);
1358 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001359 break;
1360 case Primitive::kPrimByte:
1361 __ Ldarb(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001362 if (needs_null_check) {
1363 MaybeRecordImplicitNullCheck(instruction);
1364 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001365 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1366 break;
1367 case Primitive::kPrimChar:
1368 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001369 if (needs_null_check) {
1370 MaybeRecordImplicitNullCheck(instruction);
1371 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001372 break;
1373 case Primitive::kPrimShort:
1374 __ Ldarh(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001375 if (needs_null_check) {
1376 MaybeRecordImplicitNullCheck(instruction);
1377 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001378 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1379 break;
1380 case Primitive::kPrimInt:
1381 case Primitive::kPrimNot:
1382 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001383 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001384 __ Ldar(Register(dst), base);
Roland Levillain44015862016-01-22 11:47:17 +00001385 if (needs_null_check) {
1386 MaybeRecordImplicitNullCheck(instruction);
1387 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001388 break;
1389 case Primitive::kPrimFloat:
1390 case Primitive::kPrimDouble: {
1391 DCHECK(dst.IsFPRegister());
Alexandre Rames542361f2015-01-29 16:57:31 +00001392 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001393
1394 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1395 __ Ldar(temp, base);
Roland Levillain44015862016-01-22 11:47:17 +00001396 if (needs_null_check) {
1397 MaybeRecordImplicitNullCheck(instruction);
1398 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001399 __ Fmov(FPRegister(dst), temp);
1400 break;
1401 }
1402 case Primitive::kPrimVoid:
1403 LOG(FATAL) << "Unreachable type " << type;
1404 }
1405}
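// Illustrative sketch (not part of the original file): the Sbfx issued after Ldarb/Ldarh
// above re-sign-extends the sub-word value, because the acquire loads are zero-extending.
// Plain C++ equivalent of that sign extension (bits would be 8 for byte, 16 for short).
#include <cstdint>

int32_t SignExtend(uint32_t zero_extended, int bits) {
  const int shift = 32 - bits;
  return static_cast<int32_t>(zero_extended << shift) >> shift;
}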
1406
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001407void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001408 CPURegister src,
1409 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001410 switch (type) {
1411 case Primitive::kPrimBoolean:
1412 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001413 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001414 break;
1415 case Primitive::kPrimChar:
1416 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001417 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001418 break;
1419 case Primitive::kPrimInt:
1420 case Primitive::kPrimNot:
1421 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001422 case Primitive::kPrimFloat:
1423 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001424 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001425 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001426 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001427 case Primitive::kPrimVoid:
1428 LOG(FATAL) << "Unreachable type " << type;
1429 }
1430}
1431
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001432void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1433 CPURegister src,
1434 const MemOperand& dst) {
1435 UseScratchRegisterScope temps(GetVIXLAssembler());
1436 Register temp_base = temps.AcquireX();
1437
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001438 DCHECK(!dst.IsPreIndex());
1439 DCHECK(!dst.IsPostIndex());
1440
1441 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001442 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001443 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001444 MemOperand base = MemOperand(temp_base);
1445 switch (type) {
1446 case Primitive::kPrimBoolean:
1447 case Primitive::kPrimByte:
1448 __ Stlrb(Register(src), base);
1449 break;
1450 case Primitive::kPrimChar:
1451 case Primitive::kPrimShort:
1452 __ Stlrh(Register(src), base);
1453 break;
1454 case Primitive::kPrimInt:
1455 case Primitive::kPrimNot:
1456 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001457 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001458 __ Stlr(Register(src), base);
1459 break;
1460 case Primitive::kPrimFloat:
1461 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001462 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001463 Register temp_src;
1464 if (src.IsZero()) {
1465 // The zero register is used to avoid synthesizing zero constants.
1466 temp_src = Register(src);
1467 } else {
1468 DCHECK(src.IsFPRegister());
1469 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1470 __ Fmov(temp_src, FPRegister(src));
1471 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001472
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001473 __ Stlr(temp_src, base);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001474 break;
1475 }
1476 case Primitive::kPrimVoid:
1477 LOG(FATAL) << "Unreachable type " << type;
1478 }
1479}
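// Illustrative sketch (not part of the original file): the FP path above first moves the
// floating-point bits into a core register (Fmov) because the store-release (Stlr) only
// takes core registers. A plain C++ analogue using std::atomic; names are invented.
#include <atomic>
#include <cstdint>
#include <cstring>

void StoreReleaseFloat(std::atomic<uint32_t>* location, float value) {
  uint32_t bits;
  std::memcpy(&bits, &value, sizeof(bits));          // Fmov: reinterpret the bits.
  location->store(bits, std::memory_order_release);  // Stlr: store with release semantics.
}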
1480
Calin Juravle175dc732015-08-25 15:42:32 +01001481void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1482 HInstruction* instruction,
1483 uint32_t dex_pc,
1484 SlowPathCode* slow_path) {
Alexandre Rames78e3ef62015-08-12 13:43:29 +01001485 ValidateInvokeRuntime(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001486 GenerateInvokeRuntime(GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value());
Roland Levillain896e32d2015-05-05 18:07:10 +01001487 RecordPcInfo(instruction, dex_pc, slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00001488}
1489
Roland Levillaindec8f632016-07-22 17:10:06 +01001490void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1491 HInstruction* instruction,
1492 SlowPathCode* slow_path) {
1493 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001494 GenerateInvokeRuntime(entry_point_offset);
1495}
1496
1497void CodeGeneratorARM64::GenerateInvokeRuntime(int32_t entry_point_offset) {
Roland Levillaindec8f632016-07-22 17:10:06 +01001498 BlockPoolsScope block_pools(GetVIXLAssembler());
1499 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1500 __ Blr(lr);
1501}
1502
Alexandre Rames67555f72014-11-18 10:55:16 +00001503void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001504 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001505 UseScratchRegisterScope temps(GetVIXLAssembler());
1506 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001507 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1508
Serban Constantinescu02164b32014-11-13 14:05:07 +00001509 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001510 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1511 __ Add(temp, class_reg, status_offset);
1512 __ Ldar(temp, HeapOperand(temp));
1513 __ Cmp(temp, mirror::Class::kStatusInitialized);
1514 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001515 __ Bind(slow_path->GetExitLabel());
1516}
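// Illustrative sketch (not part of the original file): the shape of the check above — an
// acquire load of the class status followed by a comparison, branching to the slow path
// while the class is not yet initialized. The status constant is assumed for the example.
#include <atomic>
#include <cstdint>

bool NeedsInitializationSlowPath(const std::atomic<int32_t>* class_status) {
  constexpr int32_t kStatusInitialized = 10;  // Assumed value, for illustration only.
  return class_status->load(std::memory_order_acquire) < kStatusInitialized;
}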
Alexandre Rames5319def2014-10-23 10:03:10 +01001517
Roland Levillain44015862016-01-22 11:47:17 +00001518void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001519 BarrierType type = BarrierAll;
1520
1521 switch (kind) {
1522 case MemBarrierKind::kAnyAny:
1523 case MemBarrierKind::kAnyStore: {
1524 type = BarrierAll;
1525 break;
1526 }
1527 case MemBarrierKind::kLoadAny: {
1528 type = BarrierReads;
1529 break;
1530 }
1531 case MemBarrierKind::kStoreStore: {
1532 type = BarrierWrites;
1533 break;
1534 }
1535 default:
1536 LOG(FATAL) << "Unexpected memory barrier " << kind;
1537 }
1538 __ Dmb(InnerShareable, type);
1539}
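// Illustrative sketch (not part of the original file): a rough C++ analogue of the barrier
// kinds handled above. The mapping to standard fences is an approximation for illustration,
// not a statement about the runtime's memory model.
#include <atomic>

void EmitBarrier(bool loads_only, bool stores_only) {
  if (loads_only) {
    std::atomic_thread_fence(std::memory_order_acquire);  // kLoadAny ~ read barrier.
  } else if (stores_only) {
    std::atomic_thread_fence(std::memory_order_release);  // kStoreStore ~ write barrier.
  } else {
    std::atomic_thread_fence(std::memory_order_seq_cst);  // kAnyAny / kAnyStore ~ full barrier.
  }
}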
1540
Serban Constantinescu02164b32014-11-13 14:05:07 +00001541void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1542 HBasicBlock* successor) {
1543 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001544 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1545 if (slow_path == nullptr) {
1546 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1547 instruction->SetSlowPath(slow_path);
1548 codegen_->AddSlowPath(slow_path);
1549 if (successor != nullptr) {
1550 DCHECK(successor->IsLoopHeader());
1551 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1552 }
1553 } else {
1554 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1555 }
1556
Serban Constantinescu02164b32014-11-13 14:05:07 +00001557 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1558 Register temp = temps.AcquireW();
1559
Andreas Gampe542451c2016-07-26 09:02:02 -07001560 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001561 if (successor == nullptr) {
1562 __ Cbnz(temp, slow_path->GetEntryLabel());
1563 __ Bind(slow_path->GetReturnLabel());
1564 } else {
1565 __ Cbz(temp, codegen_->GetLabelOf(successor));
1566 __ B(slow_path->GetEntryLabel());
1567 // slow_path will return to GetLabelOf(successor).
1568 }
1569}
1570
Alexandre Rames5319def2014-10-23 10:03:10 +01001571InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1572 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001573 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001574 assembler_(codegen->GetAssembler()),
1575 codegen_(codegen) {}
1576
1577#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001578 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001579
1580#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1581
1582enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001583 // Using a base helps identify when we hit such breakpoints.
1584 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001585#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1586 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1587#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1588};
1589
1590#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001591 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001592 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1593 } \
1594 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1595 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1596 locations->SetOut(Location::Any()); \
1597 }
1598 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1599#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1600
1601#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001602#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001603
Alexandre Rames67555f72014-11-18 10:55:16 +00001604void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001605 DCHECK_EQ(instr->InputCount(), 2U);
1606 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1607 Primitive::Type type = instr->GetResultType();
1608 switch (type) {
1609 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001610 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001611 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001612 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001613 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001614 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001615
1616 case Primitive::kPrimFloat:
1617 case Primitive::kPrimDouble:
1618 locations->SetInAt(0, Location::RequiresFpuRegister());
1619 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001620 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001621 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001622
Alexandre Rames5319def2014-10-23 10:03:10 +01001623 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001624 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001625 }
1626}
1627
Alexandre Rames09a99962015-04-15 11:47:56 +01001628void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001629 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1630
1631 bool object_field_get_with_read_barrier =
1632 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001633 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001634 new (GetGraph()->GetArena()) LocationSummary(instruction,
1635 object_field_get_with_read_barrier ?
1636 LocationSummary::kCallOnSlowPath :
1637 LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001638 locations->SetInAt(0, Location::RequiresRegister());
1639 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1640 locations->SetOut(Location::RequiresFpuRegister());
1641 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001642 // The output overlaps for an object field get when read barriers
1643 // are enabled: we do not want the load to overwrite the object's
1644 // location, as we need it to emit the read barrier.
1645 locations->SetOut(
1646 Location::RequiresRegister(),
1647 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001648 }
1649}
1650
1651void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1652 const FieldInfo& field_info) {
1653 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001654 LocationSummary* locations = instruction->GetLocations();
1655 Location base_loc = locations->InAt(0);
1656 Location out = locations->Out();
1657 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01001658 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001659 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001660 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001661
Roland Levillain44015862016-01-22 11:47:17 +00001662 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1663 // Object FieldGet with Baker's read barrier case.
1664 MacroAssembler* masm = GetVIXLAssembler();
1665 UseScratchRegisterScope temps(masm);
1666 // /* HeapReference<Object> */ out = *(base + offset)
1667 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
1668 Register temp = temps.AcquireW();
1669 // Note that potential implicit null checks are handled in this
1670 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1671 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1672 instruction,
1673 out,
1674 base,
1675 offset,
1676 temp,
1677 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001678 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001679 } else {
1680 // General case.
1681 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001682 // Note that a potential implicit null check is handled in this
1683 // CodeGeneratorARM64::LoadAcquire call.
1684 // NB: LoadAcquire will record the pc info if needed.
1685 codegen_->LoadAcquire(
1686 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001687 } else {
Roland Levillain4d027112015-07-01 15:41:14 +01001688 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001689 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001690 }
Roland Levillain44015862016-01-22 11:47:17 +00001691 if (field_type == Primitive::kPrimNot) {
1692 // If read barriers are enabled, emit read barriers other than
1693 // Baker's using a slow path (and also unpoison the loaded
1694 // reference, if heap poisoning is enabled).
1695 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1696 }
Roland Levillain4d027112015-07-01 15:41:14 +01001697 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001698}
1699
1700void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1701 LocationSummary* locations =
1702 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1703 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001704 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1705 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
1706 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001707 locations->SetInAt(1, Location::RequiresFpuRegister());
1708 } else {
1709 locations->SetInAt(1, Location::RequiresRegister());
1710 }
1711}
1712
1713void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001714 const FieldInfo& field_info,
1715 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001716 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
Alexandre Ramesd921d642015-04-16 15:07:16 +01001717 BlockPoolsScope block_pools(GetVIXLAssembler());
Alexandre Rames09a99962015-04-15 11:47:56 +01001718
1719 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001720 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001721 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001722 Offset offset = field_info.GetFieldOffset();
1723 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001724
Roland Levillain4d027112015-07-01 15:41:14 +01001725 {
1726 // We use a block to end the scratch scope before the write barrier, thus
1727 // freeing the temporary registers so they can be used in `MarkGCCard`.
1728 UseScratchRegisterScope temps(GetVIXLAssembler());
1729
1730 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
1731 DCHECK(value.IsW());
1732 Register temp = temps.AcquireW();
1733 __ Mov(temp, value.W());
1734 GetAssembler()->PoisonHeapReference(temp.W());
1735 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001736 }
Roland Levillain4d027112015-07-01 15:41:14 +01001737
1738 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001739 codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
1740 codegen_->MaybeRecordImplicitNullCheck(instruction);
Roland Levillain4d027112015-07-01 15:41:14 +01001741 } else {
1742 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1743 codegen_->MaybeRecordImplicitNullCheck(instruction);
1744 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001745 }
1746
1747 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001748 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001749 }
1750}
1751
Alexandre Rames67555f72014-11-18 10:55:16 +00001752void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001753 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001754
1755 switch (type) {
1756 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001757 case Primitive::kPrimLong: {
1758 Register dst = OutputRegister(instr);
1759 Register lhs = InputRegisterAt(instr, 0);
1760 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001761 if (instr->IsAdd()) {
1762 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001763 } else if (instr->IsAnd()) {
1764 __ And(dst, lhs, rhs);
1765 } else if (instr->IsOr()) {
1766 __ Orr(dst, lhs, rhs);
1767 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001768 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001769 } else if (instr->IsRor()) {
1770 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001771 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001772 __ Ror(dst, lhs, shift);
1773 } else {
 1774          // Ensure the shift distance is in a register of the same size as the result. If we
 1775          // are rotating a long and the shift originally comes in a w register, we do not need
 1776          // to sxtw it for use as an x register, since the shift distance is always taken
 1777          // modulo the register width (it is masked with reg_bits - 1).
1778 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1779 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001780 } else {
1781 DCHECK(instr->IsXor());
1782 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001783 }
1784 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001785 }
1786 case Primitive::kPrimFloat:
1787 case Primitive::kPrimDouble: {
1788 FPRegister dst = OutputFPRegister(instr);
1789 FPRegister lhs = InputFPRegisterAt(instr, 0);
1790 FPRegister rhs = InputFPRegisterAt(instr, 1);
1791 if (instr->IsAdd()) {
1792 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001793 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001794 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001795 } else {
1796 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001797 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001798 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001799 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001800 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001801 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001802 }
1803}
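// Illustrative sketch (not part of the original file): the rotate handled above, with the
// shift distance taken modulo the register width as described in the comment.
#include <cstdint>

uint64_t RotateRight64(uint64_t value, uint32_t distance) {
  const uint32_t shift = distance & 63u;  // The distance is masked with reg_bits - 1.
  if (shift == 0u) {
    return value;
  }
  return (value >> shift) | (value << (64u - shift));
}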
1804
Serban Constantinescu02164b32014-11-13 14:05:07 +00001805void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1806 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1807
1808 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1809 Primitive::Type type = instr->GetResultType();
1810 switch (type) {
1811 case Primitive::kPrimInt:
1812 case Primitive::kPrimLong: {
1813 locations->SetInAt(0, Location::RequiresRegister());
1814 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1815 locations->SetOut(Location::RequiresRegister());
1816 break;
1817 }
1818 default:
1819 LOG(FATAL) << "Unexpected shift type " << type;
1820 }
1821}
1822
1823void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1824 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1825
1826 Primitive::Type type = instr->GetType();
1827 switch (type) {
1828 case Primitive::kPrimInt:
1829 case Primitive::kPrimLong: {
1830 Register dst = OutputRegister(instr);
1831 Register lhs = InputRegisterAt(instr, 0);
1832 Operand rhs = InputOperandAt(instr, 1);
1833 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001834 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00001835 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001836 if (instr->IsShl()) {
1837 __ Lsl(dst, lhs, shift_value);
1838 } else if (instr->IsShr()) {
1839 __ Asr(dst, lhs, shift_value);
1840 } else {
1841 __ Lsr(dst, lhs, shift_value);
1842 }
1843 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001844 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001845
1846 if (instr->IsShl()) {
1847 __ Lsl(dst, lhs, rhs_reg);
1848 } else if (instr->IsShr()) {
1849 __ Asr(dst, lhs, rhs_reg);
1850 } else {
1851 __ Lsr(dst, lhs, rhs_reg);
1852 }
1853 }
1854 break;
1855 }
1856 default:
1857 LOG(FATAL) << "Unexpected shift operation type " << type;
1858 }
1859}
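// Illustrative sketch (not part of the original file): the masking applied to immediate
// shift distances above (kMaxIntShiftDistance / kMaxLongShiftDistance), which matches
// Java semantics where only the low 5 or 6 bits of the distance are used.
#include <cstdint>

int32_t ShlInt(int32_t value, int32_t distance) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) << (distance & 0x1f));
}

int64_t ShlLong(int64_t value, int32_t distance) {
  return static_cast<int64_t>(static_cast<uint64_t>(value) << (distance & 0x3f));
}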
1860
Alexandre Rames5319def2014-10-23 10:03:10 +01001861void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001862 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001863}
1864
1865void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001866 HandleBinaryOp(instruction);
1867}
1868
1869void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1870 HandleBinaryOp(instruction);
1871}
1872
1873void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1874 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001875}
1876
Artem Serov7fc63502016-02-09 17:15:29 +00001877void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001878 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
1879 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1880 locations->SetInAt(0, Location::RequiresRegister());
1881 // There is no immediate variant of negated bitwise instructions in AArch64.
1882 locations->SetInAt(1, Location::RequiresRegister());
1883 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1884}
1885
Artem Serov7fc63502016-02-09 17:15:29 +00001886void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00001887 Register dst = OutputRegister(instr);
1888 Register lhs = InputRegisterAt(instr, 0);
1889 Register rhs = InputRegisterAt(instr, 1);
1890
1891 switch (instr->GetOpKind()) {
1892 case HInstruction::kAnd:
1893 __ Bic(dst, lhs, rhs);
1894 break;
1895 case HInstruction::kOr:
1896 __ Orn(dst, lhs, rhs);
1897 break;
1898 case HInstruction::kXor:
1899 __ Eon(dst, lhs, rhs);
1900 break;
1901 default:
1902 LOG(FATAL) << "Unreachable";
1903 }
1904}
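// Illustrative sketch (not part of the original file): plain C++ equivalents of the
// negated-right bitwise operations selected above.
#include <cstdint>

uint64_t BicLike(uint64_t lhs, uint64_t rhs) { return lhs & ~rhs; }  // Bic: and with not.
uint64_t OrnLike(uint64_t lhs, uint64_t rhs) { return lhs | ~rhs; }  // Orn: or with not.
uint64_t EonLike(uint64_t lhs, uint64_t rhs) { return lhs ^ ~rhs; }  // Eon: xor with not.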
1905
Alexandre Rames8626b742015-11-25 16:28:08 +00001906void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
1907 HArm64DataProcWithShifterOp* instruction) {
1908 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
1909 instruction->GetType() == Primitive::kPrimLong);
1910 LocationSummary* locations =
1911 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1912 if (instruction->GetInstrKind() == HInstruction::kNeg) {
1913 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
1914 } else {
1915 locations->SetInAt(0, Location::RequiresRegister());
1916 }
1917 locations->SetInAt(1, Location::RequiresRegister());
1918 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1919}
1920
1921void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
1922 HArm64DataProcWithShifterOp* instruction) {
1923 Primitive::Type type = instruction->GetType();
1924 HInstruction::InstructionKind kind = instruction->GetInstrKind();
1925 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
1926 Register out = OutputRegister(instruction);
1927 Register left;
1928 if (kind != HInstruction::kNeg) {
1929 left = InputRegisterAt(instruction, 0);
1930 }
 1931   // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion into the
 1932   // shifter operand operation, the instruction generating `right_reg` (the input to the type
 1933   // conversion) can have a different type from the current instruction's type, so we manually
 1934   // indicate the type.
1935 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Roland Levillain5b5b9312016-03-22 14:57:31 +00001936 int64_t shift_amount = instruction->GetShiftAmount() &
1937 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexandre Rames8626b742015-11-25 16:28:08 +00001938
1939 Operand right_operand(0);
1940
1941 HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
1942 if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
1943 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
1944 } else {
1945 right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
1946 }
1947
1948 // Logical binary operations do not support extension operations in the
 1949   // operand. Note that VIXL would still manage if one were passed, by generating
1950 // the extension as a separate instruction.
1951 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
1952 DCHECK(!right_operand.IsExtendedRegister() ||
1953 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
1954 kind != HInstruction::kNeg));
1955 switch (kind) {
1956 case HInstruction::kAdd:
1957 __ Add(out, left, right_operand);
1958 break;
1959 case HInstruction::kAnd:
1960 __ And(out, left, right_operand);
1961 break;
1962 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00001963 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00001964 __ Neg(out, right_operand);
1965 break;
1966 case HInstruction::kOr:
1967 __ Orr(out, left, right_operand);
1968 break;
1969 case HInstruction::kSub:
1970 __ Sub(out, left, right_operand);
1971 break;
1972 case HInstruction::kXor:
1973 __ Eor(out, left, right_operand);
1974 break;
1975 default:
1976 LOG(FATAL) << "Unexpected operation kind: " << kind;
1977 UNREACHABLE();
1978 }
1979}
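// Illustrative sketch (not part of the original file): the kind of merged computation a
// data-processing instruction with a shifted register operand encodes in one ARM64
// instruction, e.g. an add whose right operand is shifted left. Names are invented.
#include <cstdint>

int64_t AddWithLslOperand(int64_t left, int64_t right, uint32_t shift_amount) {
  const uint64_t shifted = static_cast<uint64_t>(right) << (shift_amount & 63u);
  return left + static_cast<int64_t>(shifted);
}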
1980
Artem Serov328429f2016-07-06 16:23:04 +01001981void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00001982 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1983 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001984 LocationSummary* locations =
1985 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1986 locations->SetInAt(0, Location::RequiresRegister());
1987 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
1988 locations->SetOut(Location::RequiresRegister());
1989}
1990
Roland Levillain4a3aa572016-08-15 13:17:06 +00001991void InstructionCodeGeneratorARM64::VisitIntermediateAddress(
1992 HIntermediateAddress* instruction) {
1993 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
1994 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01001995 __ Add(OutputRegister(instruction),
1996 InputRegisterAt(instruction, 0),
1997 Operand(InputOperandAt(instruction, 1)));
1998}
1999
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002000void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002001 LocationSummary* locations =
2002 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002003 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2004 if (instr->GetOpKind() == HInstruction::kSub &&
2005 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002006 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002007 // Don't allocate register for Mneg instruction.
2008 } else {
2009 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2010 Location::RequiresRegister());
2011 }
2012 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2013 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002014 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2015}
2016
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002017void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002018 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002019 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2020 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002021
2022 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2023 // This fixup should be carried out for all multiply-accumulate instructions:
2024 // madd, msub, smaddl, smsubl, umaddl and umsubl.
2025 if (instr->GetType() == Primitive::kPrimLong &&
2026 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2027 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002028 vixl::aarch64::Instruction* prev =
2029 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002030 if (prev->IsLoadOrStore()) {
 2031       // Make sure we emit exactly one nop.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002032 vixl::aarch64::CodeBufferCheckScope scope(masm,
2033 kInstructionSize,
2034 vixl::aarch64::CodeBufferCheckScope::kCheck,
2035 vixl::aarch64::CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002036 __ nop();
2037 }
2038 }
2039
2040 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002041 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002042 __ Madd(res, mul_left, mul_right, accumulator);
2043 } else {
2044 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002045 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002046 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002047 __ Mneg(res, mul_left, mul_right);
2048 } else {
2049 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2050 __ Msub(res, mul_left, mul_right, accumulator);
2051 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002052 }
2053}
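// Illustrative sketch (not part of the original file): the accumulate forms chosen above —
// Madd, Msub, and Mneg when the accumulator is a literal zero. Overflow behaviour is
// ignored here; this only illustrates which operation is selected.
#include <cstdint>

int64_t MultiplyAccumulate(int64_t acc, int64_t left, int64_t right, bool is_sub) {
  const int64_t product = left * right;
  if (is_sub) {
    return acc == 0 ? -product        // Mneg: no accumulator register needed.
                    : acc - product;  // Msub.
  }
  return acc + product;               // Madd.
}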
2054
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002055void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002056 bool object_array_get_with_read_barrier =
2057 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002058 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002059 new (GetGraph()->GetArena()) LocationSummary(instruction,
2060 object_array_get_with_read_barrier ?
2061 LocationSummary::kCallOnSlowPath :
2062 LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002063 locations->SetInAt(0, Location::RequiresRegister());
2064 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002065 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2066 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2067 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002068 // The output overlaps in the case of an object array get with
2069 // read barriers enabled: we do not want the move to overwrite the
2070 // array's location, as we need it to emit the read barrier.
2071 locations->SetOut(
2072 Location::RequiresRegister(),
2073 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002074 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002075}
2076
2077void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002078 Primitive::Type type = instruction->GetType();
2079 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002080 LocationSummary* locations = instruction->GetLocations();
2081 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002082 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002083 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002084
Alexandre Ramesd921d642015-04-16 15:07:16 +01002085 MacroAssembler* masm = GetVIXLAssembler();
2086 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002087 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002088 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002089
Roland Levillain44015862016-01-22 11:47:17 +00002090 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2091 // Object ArrayGet with Baker's read barrier case.
2092 Register temp = temps.AcquireW();
Roland Levillain4a3aa572016-08-15 13:17:06 +00002093 // The read barrier instrumentation does not support the HIntermediateAddress instruction yet.
2094 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Roland Levillain44015862016-01-22 11:47:17 +00002095 // Note that a potential implicit null check is handled in the
2096 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2097 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2098 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002099 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002100 // General case.
2101 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002102 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002103 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2104 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002105 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002106 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002107 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002108 // The read barrier instrumentation does not support the
2109 // HIntermediateAddress instruction yet.
2110 DCHECK(!kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00002111 // We do not need to compute the intermediate address from the array: the
2112 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002113 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002114 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002115 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002116 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2117 }
2118 temp = obj;
2119 } else {
2120 __ Add(temp, obj, offset);
2121 }
2122 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2123 }
2124
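    // At this point `source` addresses the element to load. As an
    // illustration (register names arbitrary), an int[] load with a variable
    // index comes out roughly as:
    //   add temp, obj, #data_offset          // start of the array payload
    //   ldr w_out, [temp, x_index, lsl #2]   // scaled by sizeof(int32_t)
    // whereas a constant index folds everything into one immediate offset:
    //   ldr w_out, [obj, #(data_offset + 4 * index)]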
2125 codegen_->Load(type, OutputCPURegister(instruction), source);
2126 codegen_->MaybeRecordImplicitNullCheck(instruction);
2127
2128 if (type == Primitive::kPrimNot) {
2129 static_assert(
2130 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2131 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2132 Location obj_loc = locations->InAt(0);
2133 if (index.IsConstant()) {
2134 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2135 } else {
2136 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2137 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002138 }
Roland Levillain4d027112015-07-01 15:41:14 +01002139 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002140}
2141
Alexandre Rames5319def2014-10-23 10:03:10 +01002142void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2143 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2144 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002145 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002146}
2147
2148void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002149 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexandre Ramesd921d642015-04-16 15:07:16 +01002150 BlockPoolsScope block_pools(GetVIXLAssembler());
Vladimir Markodce016e2016-04-28 13:10:02 +01002151 __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00002152 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002153}
2154
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002155void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002156 Primitive::Type value_type = instruction->GetComponentType();
2157
2158 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002159 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2160 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002161 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002162 LocationSummary::kCallOnSlowPath :
2163 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002164 locations->SetInAt(0, Location::RequiresRegister());
2165 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002166 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2167 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2168 } else if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002169 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002170 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002171 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002172 }
2173}
2174
2175void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2176 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002177 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002178 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002179 bool needs_write_barrier =
2180 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002181
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002182 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002183 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002184 CPURegister source = value;
2185 Location index = locations->InAt(1);
2186 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2187 MemOperand destination = HeapOperand(array);
2188 MacroAssembler* masm = GetVIXLAssembler();
2189 BlockPoolsScope block_pools(masm);
2190
2191 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002192 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002193 if (index.IsConstant()) {
2194 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2195 destination = HeapOperand(array, offset);
2196 } else {
2197 UseScratchRegisterScope temps(masm);
2198 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002199 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002200 // The read barrier instrumentation does not support the
2201 // HIntermediateAddress instruction yet.
2202 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002203 // We do not need to compute the intermediate address from the array: the
2204 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002205 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002206 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002207 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002208            DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2209 }
2210 temp = array;
2211 } else {
2212 __ Add(temp, array, offset);
2213 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002214 destination = HeapOperand(temp,
2215 XRegisterFrom(index),
2216 LSL,
2217 Primitive::ComponentSizeShift(value_type));
2218 }
2219 codegen_->Store(value_type, value, destination);
2220 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002221 } else {
Roland Levillain4a3aa572016-08-15 13:17:06 +00002222 DCHECK(needs_write_barrier);
Artem Serov328429f2016-07-06 16:23:04 +01002223 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002224 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002225 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002226 {
2227 // We use a block to end the scratch scope before the write barrier, thus
2228 // freeing the temporary registers so they can be used in `MarkGCCard`.
2229 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002230 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002231 if (index.IsConstant()) {
2232 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002233 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002234 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002235 destination = HeapOperand(temp,
2236 XRegisterFrom(index),
2237 LSL,
2238 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002239 }
2240
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002241 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2242 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2243 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2244
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002245 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002246 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2247 codegen_->AddSlowPath(slow_path);
2248 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002249 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002250 __ Cbnz(Register(value), &non_zero);
2251 if (!index.IsConstant()) {
2252 __ Add(temp, array, offset);
2253 }
2254 __ Str(wzr, destination);
2255 codegen_->MaybeRecordImplicitNullCheck(instruction);
2256 __ B(&done);
2257 __ Bind(&non_zero);
2258 }
2259
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002260 if (kEmitCompilerReadBarrier) {
2261 // When read barriers are enabled, the type checking
2262 // instrumentation requires two read barriers:
2263 //
2264 // __ Mov(temp2, temp);
2265 // // /* HeapReference<Class> */ temp = temp->component_type_
2266 // __ Ldr(temp, HeapOperand(temp, component_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002267 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002268 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
2269 //
2270 // // /* HeapReference<Class> */ temp2 = value->klass_
2271 // __ Ldr(temp2, HeapOperand(Register(value), class_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002272 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002273 // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
2274 //
2275 // __ Cmp(temp, temp2);
2276 //
2277 // However, the second read barrier may trash `temp`, as it
2278 // is a temporary register, and as such would not be saved
2279 // along with live registers before calling the runtime (nor
2280 // restored afterwards). So in this case, we bail out and
2281 // delegate the work to the array set slow path.
2282 //
2283 // TODO: Extend the register allocator to support a new
2284 // "(locally) live temp" location so as to avoid always
2285 // going into the slow path when read barriers are enabled.
2286 __ B(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002287 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002288 Register temp2 = temps.AcquireSameSizeAs(array);
2289 // /* HeapReference<Class> */ temp = array->klass_
2290 __ Ldr(temp, HeapOperand(array, class_offset));
2291 codegen_->MaybeRecordImplicitNullCheck(instruction);
2292 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2293
2294 // /* HeapReference<Class> */ temp = temp->component_type_
2295 __ Ldr(temp, HeapOperand(temp, component_offset));
2296 // /* HeapReference<Class> */ temp2 = value->klass_
2297 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2298 // If heap poisoning is enabled, no need to unpoison `temp`
2299 // nor `temp2`, as we are comparing two poisoned references.
2300 __ Cmp(temp, temp2);
2301
2302 if (instruction->StaticTypeOfArrayIsObjectArray()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002303 vixl::aarch64::Label do_put;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002304 __ B(eq, &do_put);
2305 // If heap poisoning is enabled, the `temp` reference has
2306 // not been unpoisoned yet; unpoison it now.
2307 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2308
2309 // /* HeapReference<Class> */ temp = temp->super_class_
2310 __ Ldr(temp, HeapOperand(temp, super_offset));
2311 // If heap poisoning is enabled, no need to unpoison
2312 // `temp`, as we are comparing against null below.
2313 __ Cbnz(temp, slow_path->GetEntryLabel());
2314 __ Bind(&do_put);
2315 } else {
2316 __ B(ne, slow_path->GetEntryLabel());
2317 }
2318 temps.Release(temp2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002319 }
2320 }
2321
2322 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002323 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002324 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002325 __ Mov(temp2, value.W());
2326 GetAssembler()->PoisonHeapReference(temp2);
2327 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002328 }
2329
2330 if (!index.IsConstant()) {
2331 __ Add(temp, array, offset);
2332 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002333 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002334
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002335 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002336 codegen_->MaybeRecordImplicitNullCheck(instruction);
2337 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002338 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002339
2340 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2341
2342 if (done.IsLinked()) {
2343 __ Bind(&done);
2344 }
2345
2346 if (slow_path != nullptr) {
2347 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002348 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002349 }
2350}
2351
Alexandre Rames67555f72014-11-18 10:55:16 +00002352void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002353 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2354 ? LocationSummary::kCallOnSlowPath
2355 : LocationSummary::kNoCall;
2356 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002357 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002358 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002359 if (instruction->HasUses()) {
2360 locations->SetOut(Location::SameAsFirstInput());
2361 }
2362}
2363
2364void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002365 BoundsCheckSlowPathARM64* slow_path =
2366 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002367 codegen_->AddSlowPath(slow_path);
2368
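  // A single unsigned comparison covers both failure modes: a negative index,
  // reinterpreted as unsigned, is larger than any valid length, so the "hs"
  // (unsigned >=) branch below also catches index < 0. Sketch (illustrative
  // registers):
  //   cmp  w_index, w_length
  //   b.hs <BoundsCheckSlowPath>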
2369 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2370 __ B(slow_path->GetEntryLabel(), hs);
2371}
2372
Alexandre Rames67555f72014-11-18 10:55:16 +00002373void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2374 LocationSummary* locations =
2375 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2376 locations->SetInAt(0, Location::RequiresRegister());
2377 if (check->HasUses()) {
2378 locations->SetOut(Location::SameAsFirstInput());
2379 }
2380}
2381
2382void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2383 // We assume the class is not null.
2384 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2385 check->GetLoadClass(), check, check->GetDexPc(), true);
2386 codegen_->AddSlowPath(slow_path);
2387 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2388}
2389
Roland Levillain1a653882016-03-18 18:05:57 +00002390static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2391 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2392 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2393}
2394
2395void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2396 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2397 Location rhs_loc = instruction->GetLocations()->InAt(1);
2398 if (rhs_loc.IsConstant()) {
2399 // 0.0 is the only immediate that can be encoded directly in
2400 // an FCMP instruction.
2401 //
2402 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2403 // specify that in a floating-point comparison, positive zero
2404 // and negative zero are considered equal, so we can use the
2405 // literal 0.0 for both cases here.
2406 //
2407      // Note however that some methods (Float.equals, Float.compare,
2408      // Float.compareTo, Double.equals, Double.compare,
2409 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2410 // StrictMath.min) consider 0.0 to be (strictly) greater than
2411 // -0.0. So if we ever translate calls to these methods into a
2412 // HCompare instruction, we must handle the -0.0 case with
2413 // care here.
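    // For example (illustrative only): with -0.0f in the input register,
    //   fcmp s0, #0.0
    // sets the Z flag, i.e. the values compare equal as the JLS requires,
    // whereas Float.compare(-0.0f, 0.0f) returns -1 and therefore cannot be
    // lowered to a bare fcmp against the 0.0 literal.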
2414 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2415 __ Fcmp(lhs_reg, 0.0);
2416 } else {
2417 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2418 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002419}
2420
Serban Constantinescu02164b32014-11-13 14:05:07 +00002421void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002422 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002423 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2424 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002425 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002426 case Primitive::kPrimBoolean:
2427 case Primitive::kPrimByte:
2428 case Primitive::kPrimShort:
2429 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002430 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002431 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002432 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002433 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002434 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2435 break;
2436 }
2437 case Primitive::kPrimFloat:
2438 case Primitive::kPrimDouble: {
2439 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002440 locations->SetInAt(1,
2441 IsFloatingPointZeroConstant(compare->InputAt(1))
2442 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2443 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002444 locations->SetOut(Location::RequiresRegister());
2445 break;
2446 }
2447 default:
2448 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2449 }
2450}
2451
2452void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2453 Primitive::Type in_type = compare->InputAt(0)->GetType();
2454
2455 // 0 if: left == right
2456 // 1 if: left > right
2457 // -1 if: left < right
2458 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002459 case Primitive::kPrimBoolean:
2460 case Primitive::kPrimByte:
2461 case Primitive::kPrimShort:
2462 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002463 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002464 case Primitive::kPrimLong: {
2465 Register result = OutputRegister(compare);
2466 Register left = InputRegisterAt(compare, 0);
2467 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002468 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002469 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2470 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
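      // Worked example (illustrative): left = 5, right = 7. The Cmp sets
      // "lt" and "ne", so Cset writes 1 and Cneg negates it to -1, as
      // expected for left < right. When left == right, "ne" is false: Cset
      // writes 0 and Cneg leaves it unchanged.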
Serban Constantinescu02164b32014-11-13 14:05:07 +00002471 break;
2472 }
2473 case Primitive::kPrimFloat:
2474 case Primitive::kPrimDouble: {
2475 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002476 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002477 __ Cset(result, ne);
2478 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002479 break;
2480 }
2481 default:
2482 LOG(FATAL) << "Unimplemented compare type " << in_type;
2483 }
2484}
2485
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002486void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002487 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002488
2489 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2490 locations->SetInAt(0, Location::RequiresFpuRegister());
2491 locations->SetInAt(1,
2492 IsFloatingPointZeroConstant(instruction->InputAt(1))
2493 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2494 : Location::RequiresFpuRegister());
2495 } else {
2496 // Integer cases.
2497 locations->SetInAt(0, Location::RequiresRegister());
2498 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2499 }
2500
David Brazdilb3e773e2016-01-26 11:28:37 +00002501 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002502 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002503 }
2504}
2505
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002506void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002507 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002508 return;
2509 }
2510
2511 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002512 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002513 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002514
Roland Levillain7f63c522015-07-13 15:54:55 +00002515 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002516 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002517 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002518 } else {
2519 // Integer cases.
2520 Register lhs = InputRegisterAt(instruction, 0);
2521 Operand rhs = InputOperandAt(instruction, 1);
2522 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002523 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002524 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002525}
2526
2527#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2528 M(Equal) \
2529 M(NotEqual) \
2530 M(LessThan) \
2531 M(LessThanOrEqual) \
2532 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002533 M(GreaterThanOrEqual) \
2534 M(Below) \
2535 M(BelowOrEqual) \
2536 M(Above) \
2537 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002538#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002539void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2540void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002541FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002542#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002543#undef FOR_EACH_CONDITION_INSTRUCTION
2544
Zheng Xuc6667102015-05-15 16:08:45 +08002545void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2546 DCHECK(instruction->IsDiv() || instruction->IsRem());
2547
2548 LocationSummary* locations = instruction->GetLocations();
2549 Location second = locations->InAt(1);
2550 DCHECK(second.IsConstant());
2551
2552 Register out = OutputRegister(instruction);
2553 Register dividend = InputRegisterAt(instruction, 0);
2554 int64_t imm = Int64FromConstant(second.GetConstant());
2555 DCHECK(imm == 1 || imm == -1);
2556
2557 if (instruction->IsRem()) {
2558 __ Mov(out, 0);
2559 } else {
2560 if (imm == 1) {
2561 __ Mov(out, dividend);
2562 } else {
2563 __ Neg(out, dividend);
2564 }
2565 }
2566}
2567
2568void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2569 DCHECK(instruction->IsDiv() || instruction->IsRem());
2570
2571 LocationSummary* locations = instruction->GetLocations();
2572 Location second = locations->InAt(1);
2573 DCHECK(second.IsConstant());
2574
2575 Register out = OutputRegister(instruction);
2576 Register dividend = InputRegisterAt(instruction, 0);
2577 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002578 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002579 int ctz_imm = CTZ(abs_imm);
2580
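  // Worked example for the division path below (illustrative): with imm = 8,
  // abs_imm - 1 = 7 and ctz_imm = 3. For dividend = -20:
  //   temp = -20 + 7 = -13; the dividend is negative, so Csel picks temp;
  //   asr by 3 gives -13 >> 3 = -2, i.e. -20 / 8 truncated toward zero.
  // The abs_imm - 1 bias is only applied to negative dividends, which is what
  // turns the flooring arithmetic shift into a truncating division.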
2581 UseScratchRegisterScope temps(GetVIXLAssembler());
2582 Register temp = temps.AcquireSameSizeAs(out);
2583
2584 if (instruction->IsDiv()) {
2585 __ Add(temp, dividend, abs_imm - 1);
2586 __ Cmp(dividend, 0);
2587 __ Csel(out, temp, dividend, lt);
2588 if (imm > 0) {
2589 __ Asr(out, out, ctz_imm);
2590 } else {
2591 __ Neg(out, Operand(out, ASR, ctz_imm));
2592 }
2593 } else {
2594 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2595 __ Asr(temp, dividend, bits - 1);
2596 __ Lsr(temp, temp, bits - ctz_imm);
2597 __ Add(out, dividend, temp);
2598 __ And(out, out, abs_imm - 1);
2599 __ Sub(out, out, temp);
2600 }
2601}
2602
2603void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2604 DCHECK(instruction->IsDiv() || instruction->IsRem());
2605
2606 LocationSummary* locations = instruction->GetLocations();
2607 Location second = locations->InAt(1);
2608 DCHECK(second.IsConstant());
2609
2610 Register out = OutputRegister(instruction);
2611 Register dividend = InputRegisterAt(instruction, 0);
2612 int64_t imm = Int64FromConstant(second.GetConstant());
2613
2614 Primitive::Type type = instruction->GetResultType();
2615 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2616
2617 int64_t magic;
2618 int shift;
2619 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
2620
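  // Rough sketch of the constants involved (illustrative, following the usual
  // Hacker's Delight style derivation): for a 32-bit division by 7 this
  // yields magic = 0x92492493 (negative as an int32_t) and shift = 2. Since
  // imm > 0 while magic < 0, the dividend is added back to the high half of
  // the product below, and the final subtraction of the shifted-in sign bit
  // rounds negative quotients toward zero.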
2621 UseScratchRegisterScope temps(GetVIXLAssembler());
2622 Register temp = temps.AcquireSameSizeAs(out);
2623
2624 // temp = get_high(dividend * magic)
2625 __ Mov(temp, magic);
2626 if (type == Primitive::kPrimLong) {
2627 __ Smulh(temp, dividend, temp);
2628 } else {
2629 __ Smull(temp.X(), dividend, temp);
2630 __ Lsr(temp.X(), temp.X(), 32);
2631 }
2632
2633 if (imm > 0 && magic < 0) {
2634 __ Add(temp, temp, dividend);
2635 } else if (imm < 0 && magic > 0) {
2636 __ Sub(temp, temp, dividend);
2637 }
2638
2639 if (shift != 0) {
2640 __ Asr(temp, temp, shift);
2641 }
2642
2643 if (instruction->IsDiv()) {
2644 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2645 } else {
2646 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2647 // TODO: Strength reduction for msub.
2648 Register temp_imm = temps.AcquireSameSizeAs(out);
2649 __ Mov(temp_imm, imm);
2650 __ Msub(out, temp, temp_imm, dividend);
2651 }
2652}
2653
2654void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2655 DCHECK(instruction->IsDiv() || instruction->IsRem());
2656 Primitive::Type type = instruction->GetResultType();
2657  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2658
2659 LocationSummary* locations = instruction->GetLocations();
2660 Register out = OutputRegister(instruction);
2661 Location second = locations->InAt(1);
2662
2663 if (second.IsConstant()) {
2664 int64_t imm = Int64FromConstant(second.GetConstant());
2665
2666 if (imm == 0) {
2667      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2668 } else if (imm == 1 || imm == -1) {
2669 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002670 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002671 DivRemByPowerOfTwo(instruction);
2672 } else {
2673 DCHECK(imm <= -2 || imm >= 2);
2674 GenerateDivRemWithAnyConstant(instruction);
2675 }
2676 } else {
2677 Register dividend = InputRegisterAt(instruction, 0);
2678 Register divisor = InputRegisterAt(instruction, 1);
2679 if (instruction->IsDiv()) {
2680 __ Sdiv(out, dividend, divisor);
2681 } else {
2682 UseScratchRegisterScope temps(GetVIXLAssembler());
2683 Register temp = temps.AcquireSameSizeAs(out);
2684 __ Sdiv(temp, dividend, divisor);
2685 __ Msub(out, temp, divisor, dividend);
2686 }
2687 }
2688}
2689
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002690void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2691 LocationSummary* locations =
2692 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2693 switch (div->GetResultType()) {
2694 case Primitive::kPrimInt:
2695 case Primitive::kPrimLong:
2696 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002697 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002698 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2699 break;
2700
2701 case Primitive::kPrimFloat:
2702 case Primitive::kPrimDouble:
2703 locations->SetInAt(0, Location::RequiresFpuRegister());
2704 locations->SetInAt(1, Location::RequiresFpuRegister());
2705 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2706 break;
2707
2708 default:
2709 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2710 }
2711}
2712
2713void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2714 Primitive::Type type = div->GetResultType();
2715 switch (type) {
2716 case Primitive::kPrimInt:
2717 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002718 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002719 break;
2720
2721 case Primitive::kPrimFloat:
2722 case Primitive::kPrimDouble:
2723 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2724 break;
2725
2726 default:
2727 LOG(FATAL) << "Unexpected div type " << type;
2728 }
2729}
2730
Alexandre Rames67555f72014-11-18 10:55:16 +00002731void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002732 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2733 ? LocationSummary::kCallOnSlowPath
2734 : LocationSummary::kNoCall;
2735 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002736 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2737 if (instruction->HasUses()) {
2738 locations->SetOut(Location::SameAsFirstInput());
2739 }
2740}
2741
2742void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2743 SlowPathCodeARM64* slow_path =
2744 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2745 codegen_->AddSlowPath(slow_path);
2746 Location value = instruction->GetLocations()->InAt(0);
2747
Alexandre Rames3e69f162014-12-10 10:36:50 +00002748 Primitive::Type type = instruction->GetType();
2749
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002750 if (!Primitive::IsIntegralType(type)) {
2751 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002752 return;
2753 }
2754
Alexandre Rames67555f72014-11-18 10:55:16 +00002755 if (value.IsConstant()) {
2756 int64_t divisor = Int64ConstantFrom(value);
2757 if (divisor == 0) {
2758 __ B(slow_path->GetEntryLabel());
2759 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002760      // A division by a non-zero constant is valid. We don't need to perform
2761 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002762 }
2763 } else {
2764 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2765 }
2766}
2767
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002768void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2769 LocationSummary* locations =
2770 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2771 locations->SetOut(Location::ConstantLocation(constant));
2772}
2773
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002774void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2775 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002776 // Will be generated at use site.
2777}
2778
Alexandre Rames5319def2014-10-23 10:03:10 +01002779void LocationsBuilderARM64::VisitExit(HExit* exit) {
2780 exit->SetLocations(nullptr);
2781}
2782
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002783void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002784}
2785
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002786void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2787 LocationSummary* locations =
2788 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2789 locations->SetOut(Location::ConstantLocation(constant));
2790}
2791
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002792void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002793 // Will be generated at use site.
2794}
2795
David Brazdilfc6a86a2015-06-26 10:33:45 +00002796void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002797 DCHECK(!successor->IsExitBlock());
2798 HBasicBlock* block = got->GetBlock();
2799 HInstruction* previous = got->GetPrevious();
2800 HLoopInformation* info = block->GetLoopInformation();
2801
David Brazdil46e2a392015-03-16 17:31:52 +00002802 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002803 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2804 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2805 return;
2806 }
2807 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2808 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2809 }
2810 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002811 __ B(codegen_->GetLabelOf(successor));
2812 }
2813}
2814
David Brazdilfc6a86a2015-06-26 10:33:45 +00002815void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2816 got->SetLocations(nullptr);
2817}
2818
2819void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2820 HandleGoto(got, got->GetSuccessor());
2821}
2822
2823void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2824 try_boundary->SetLocations(nullptr);
2825}
2826
2827void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2828 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2829 if (!successor->IsExitBlock()) {
2830 HandleGoto(try_boundary, successor);
2831 }
2832}
2833
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002834void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002835 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01002836 vixl::aarch64::Label* true_target,
2837 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00002838 // FP branching requires both targets to be explicit. If either of the targets
2839 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01002840 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002841 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002842
David Brazdil0debae72015-11-12 18:37:00 +00002843 if (true_target == nullptr && false_target == nullptr) {
2844 // Nothing to do. The code always falls through.
2845 return;
2846 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002847 // Constant condition, statically compared against "true" (integer value 1).
2848 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002849 if (true_target != nullptr) {
2850 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002851 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002852 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002853 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002854 if (false_target != nullptr) {
2855 __ B(false_target);
2856 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002857 }
David Brazdil0debae72015-11-12 18:37:00 +00002858 return;
2859 }
2860
2861 // The following code generates these patterns:
2862 // (1) true_target == nullptr && false_target != nullptr
2863 // - opposite condition true => branch to false_target
2864 // (2) true_target != nullptr && false_target == nullptr
2865 // - condition true => branch to true_target
2866 // (3) true_target != nullptr && false_target != nullptr
2867 // - condition true => branch to true_target
2868 // - branch to false_target
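  // For instance (illustrative): for `if (x < 0)` where the true block is the
  // next block in the graph, only `false_target` is non-null (pattern 1), so
  // the code below branches on the opposite condition (ge); with a zero
  // right-hand side this further collapses into a single Tbz on the sign bit.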
2869 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002870 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002871 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002872 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002873 if (true_target == nullptr) {
2874 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2875 } else {
2876 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2877 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002878 } else {
2879 // The condition instruction has not been materialized, use its inputs as
2880 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002881 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002882
David Brazdil0debae72015-11-12 18:37:00 +00002883 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002884 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002885 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002886 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002887 IfCondition opposite_condition = condition->GetOppositeCondition();
2888 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002889 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002890 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002891 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002892 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002893 // Integer cases.
2894 Register lhs = InputRegisterAt(condition, 0);
2895 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002896
2897 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01002898 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00002899 if (true_target == nullptr) {
2900 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2901 non_fallthrough_target = false_target;
2902 } else {
2903 arm64_cond = ARM64Condition(condition->GetCondition());
2904 non_fallthrough_target = true_target;
2905 }
2906
Aart Bik086d27e2016-01-20 17:02:00 -08002907 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01002908 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002909 switch (arm64_cond) {
2910 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002911 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002912 break;
2913 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002914 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002915 break;
2916 case lt:
2917 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002918 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002919 break;
2920 case ge:
2921 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002922 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002923 break;
2924 default:
2925            // Without the `static_cast` the compiler rejects the implicit
2926            // promotion of the enum to a signed type (`-Werror=sign-promo`).
2927 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2928 }
2929 } else {
2930 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002931 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002932 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002933 }
2934 }
David Brazdil0debae72015-11-12 18:37:00 +00002935
2936 // If neither branch falls through (case 3), the conditional branch to `true_target`
2937 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2938 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002939 __ B(false_target);
2940 }
David Brazdil0debae72015-11-12 18:37:00 +00002941
2942 if (fallthrough_target.IsLinked()) {
2943 __ Bind(&fallthrough_target);
2944 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002945}
2946
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002947void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2948 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002949 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002950 locations->SetInAt(0, Location::RequiresRegister());
2951 }
2952}
2953
2954void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002955 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2956 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002957 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
2958 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
2959 true_target = nullptr;
2960 }
2961 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
2962 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
2963 false_target = nullptr;
2964 }
David Brazdil0debae72015-11-12 18:37:00 +00002965 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002966}
2967
2968void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2969 LocationSummary* locations = new (GetGraph()->GetArena())
2970 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00002971 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002972 locations->SetInAt(0, Location::RequiresRegister());
2973 }
2974}
2975
2976void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08002977 SlowPathCodeARM64* slow_path =
2978 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00002979 GenerateTestAndBranch(deoptimize,
2980 /* condition_input_index */ 0,
2981 slow_path->GetEntryLabel(),
2982 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002983}
2984
David Brazdilc0b601b2016-02-08 14:20:45 +00002985static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
2986 return condition->IsCondition() &&
2987 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
2988}
2989
Alexandre Rames880f1192016-06-13 16:04:50 +01002990static inline Condition GetConditionForSelect(HCondition* condition) {
2991 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00002992 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
2993 : ARM64Condition(cond);
2994}
2995
David Brazdil74eb1b22015-12-14 11:44:01 +00002996void LocationsBuilderARM64::VisitSelect(HSelect* select) {
2997 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01002998 if (Primitive::IsFloatingPointType(select->GetType())) {
2999 locations->SetInAt(0, Location::RequiresFpuRegister());
3000 locations->SetInAt(1, Location::RequiresFpuRegister());
3001 locations->SetOut(Location::RequiresFpuRegister());
3002 } else {
3003 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3004 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3005 bool is_true_value_constant = cst_true_value != nullptr;
3006 bool is_false_value_constant = cst_false_value != nullptr;
3007 // Ask VIXL whether we should synthesize constants in registers.
3008 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
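    // For instance (illustrative, and subject to VIXL's heuristics): a value
    // pair like (1, 0) can be produced with a single csinc against wzr, so
    // neither constant needs its own register, whereas an arbitrary pair of
    // large constants forces at least one of them into a register first.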
3009 Operand true_op = is_true_value_constant ?
3010 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3011 Operand false_op = is_false_value_constant ?
3012 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3013 bool true_value_in_register = false;
3014 bool false_value_in_register = false;
3015 MacroAssembler::GetCselSynthesisInformation(
3016 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3017 true_value_in_register |= !is_true_value_constant;
3018 false_value_in_register |= !is_false_value_constant;
3019
3020 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3021 : Location::ConstantLocation(cst_true_value));
3022 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3023 : Location::ConstantLocation(cst_false_value));
3024 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003025 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003026
David Brazdil74eb1b22015-12-14 11:44:01 +00003027 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3028 locations->SetInAt(2, Location::RequiresRegister());
3029 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003030}
3031
3032void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003033 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003034 Condition csel_cond;
3035
3036 if (IsBooleanValueOrMaterializedCondition(cond)) {
3037 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003038 // Use the condition flags set by the previous instruction.
3039 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003040 } else {
3041 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003042 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003043 }
3044 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003045 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003046 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003047 } else {
3048 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003049 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003050 }
3051
Alexandre Rames880f1192016-06-13 16:04:50 +01003052 if (Primitive::IsFloatingPointType(select->GetType())) {
3053 __ Fcsel(OutputFPRegister(select),
3054 InputFPRegisterAt(select, 1),
3055 InputFPRegisterAt(select, 0),
3056 csel_cond);
3057 } else {
3058 __ Csel(OutputRegister(select),
3059 InputOperandAt(select, 1),
3060 InputOperandAt(select, 0),
3061 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003062 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003063}
3064
David Srbecky0cf44932015-12-09 14:09:59 +00003065void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3066 new (GetGraph()->GetArena()) LocationSummary(info);
3067}
3068
David Srbeckyd28f4a02016-03-14 17:14:24 +00003069void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3070 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003071}
3072
3073void CodeGeneratorARM64::GenerateNop() {
3074 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003075}
3076
Alexandre Rames5319def2014-10-23 10:03:10 +01003077void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003078 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003079}
3080
3081void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003082 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003083}
3084
3085void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003086 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003087}
3088
3089void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003090 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003091}
3092
Roland Levillain44015862016-01-22 11:47:17 +00003093static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3094 return kEmitCompilerReadBarrier &&
3095 (kUseBakerReadBarrier ||
3096 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3097 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3098 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3099}
3100
Alexandre Rames67555f72014-11-18 10:55:16 +00003101void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003102 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003103 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3104 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003105 case TypeCheckKind::kExactCheck:
3106 case TypeCheckKind::kAbstractClassCheck:
3107 case TypeCheckKind::kClassHierarchyCheck:
3108 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003109 call_kind =
3110 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003111 break;
3112 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003113 case TypeCheckKind::kUnresolvedCheck:
3114 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003115 call_kind = LocationSummary::kCallOnSlowPath;
3116 break;
3117 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003118
Alexandre Rames67555f72014-11-18 10:55:16 +00003119 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003120 locations->SetInAt(0, Location::RequiresRegister());
3121 locations->SetInAt(1, Location::RequiresRegister());
3122 // The "out" register is used as a temporary, so it overlaps with the inputs.
3123 // Note that TypeCheckSlowPathARM64 uses this register too.
3124 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3125 // When read barriers are enabled, we need a temporary register for
3126 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003127 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003128 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003129 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003130}
3131
3132void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003133 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003134 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003135 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003136 Register obj = InputRegisterAt(instruction, 0);
3137 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003138 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003139 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003140 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3141 locations->GetTemp(0) :
3142 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003143 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3144 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3145 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3146 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003147
Scott Wakeling97c72b72016-06-24 16:19:36 +01003148 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003149 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003150
3151 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003152 // Avoid null check if we know `obj` is not null.
3153 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003154 __ Cbz(obj, &zero);
3155 }
3156
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003157 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003158 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003159
Roland Levillain44015862016-01-22 11:47:17 +00003160 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003161 case TypeCheckKind::kExactCheck: {
3162 __ Cmp(out, cls);
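      // Materialize the comparison result: `out` = 1 if the classes are equal, 0 otherwise.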
3163 __ Cset(out, eq);
3164 if (zero.IsLinked()) {
3165 __ B(&done);
3166 }
3167 break;
3168 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003169
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003170 case TypeCheckKind::kAbstractClassCheck: {
3171 // If the class is abstract, we eagerly fetch the super class of the
3172 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003173 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003174 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003175 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003176 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003177 // If `out` is null, we use it for the result, and jump to `done`.
3178 __ Cbz(out, &done);
3179 __ Cmp(out, cls);
3180 __ B(ne, &loop);
3181 __ Mov(out, 1);
3182 if (zero.IsLinked()) {
3183 __ B(&done);
3184 }
3185 break;
3186 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003187
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003188 case TypeCheckKind::kClassHierarchyCheck: {
3189 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003190 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003191 __ Bind(&loop);
3192 __ Cmp(out, cls);
3193 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003194 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003195 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003196 __ Cbnz(out, &loop);
3197 // If `out` is null, we use it for the result, and jump to `done`.
3198 __ B(&done);
3199 __ Bind(&success);
3200 __ Mov(out, 1);
3201 if (zero.IsLinked()) {
3202 __ B(&done);
3203 }
3204 break;
3205 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003206
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003207 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003208 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003209 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003210 __ Cmp(out, cls);
3211 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003212 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003213 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003214 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003215 // If `out` is null, we use it for the result, and jump to `done`.
3216 __ Cbz(out, &done);
3217 __ Ldrh(out, HeapOperand(out, primitive_offset));
3218 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3219 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003220 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003221 __ Mov(out, 1);
3222 __ B(&done);
3223 break;
3224 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003225
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003226 case TypeCheckKind::kArrayCheck: {
3227 __ Cmp(out, cls);
3228 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003229 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3230 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003231 codegen_->AddSlowPath(slow_path);
3232 __ B(ne, slow_path->GetEntryLabel());
3233 __ Mov(out, 1);
3234 if (zero.IsLinked()) {
3235 __ B(&done);
3236 }
3237 break;
3238 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003239
Calin Juravle98893e12015-10-02 21:05:03 +01003240 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003241 case TypeCheckKind::kInterfaceCheck: {
3242 // Note that we indeed only call on slow path, but we always go
3243 // into the slow path for the unresolved and interface check
3244 // cases.
3245 //
3246 // We cannot directly call the InstanceofNonTrivial runtime
3247 // entry point without resorting to a type checking slow path
3248 // here (i.e. by calling InvokeRuntime directly), as it would
3249 // require us to assign fixed registers for the inputs of this
3250 // HInstanceOf instruction (following the runtime calling
3251 // convention), which might be cluttered by the potential first
3252 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003253 //
3254 // TODO: Introduce a new runtime entry point taking the object
3255 // to test (instead of its class) as argument, and let it deal
3256 // with the read barrier issues. This will let us refactor this
3257 // case of the `switch` code as it was previously (with a direct
3258 // call to the runtime not using a type checking slow path).
3259 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003260 DCHECK(locations->OnlyCallsOnSlowPath());
3261 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3262 /* is_fatal */ false);
3263 codegen_->AddSlowPath(slow_path);
3264 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003265 if (zero.IsLinked()) {
3266 __ B(&done);
3267 }
3268 break;
3269 }
3270 }
3271
3272 if (zero.IsLinked()) {
3273 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003274 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003275 }
3276
3277 if (done.IsLinked()) {
3278 __ Bind(&done);
3279 }
3280
3281 if (slow_path != nullptr) {
3282 __ Bind(slow_path->GetExitLabel());
3283 }
3284}
3285
3286void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3287 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3288 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3289
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003290 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3291 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003292 case TypeCheckKind::kExactCheck:
3293 case TypeCheckKind::kAbstractClassCheck:
3294 case TypeCheckKind::kClassHierarchyCheck:
3295 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003296 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3297 LocationSummary::kCallOnSlowPath :
3298 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003299 break;
3300 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003301 case TypeCheckKind::kUnresolvedCheck:
3302 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003303 call_kind = LocationSummary::kCallOnSlowPath;
3304 break;
3305 }
3306
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003307 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3308 locations->SetInAt(0, Location::RequiresRegister());
3309 locations->SetInAt(1, Location::RequiresRegister());
3310 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3311 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003312 // When read barriers are enabled, we need an additional temporary
3313 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003314 if (TypeCheckNeedsATemporary(type_check_kind)) {
3315 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003316 }
3317}
3318
3319void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003320 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003321 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003322 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003323 Register obj = InputRegisterAt(instruction, 0);
3324 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003325 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003326 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3327 locations->GetTemp(1) :
3328 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003329 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003330 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3331 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3332 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3333 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003334
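  // The type check slow path is fatal (it never returns) only for the simple check
  // kinds below and only when the instruction cannot throw into a catch block.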
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003335 bool is_type_check_slow_path_fatal =
3336 (type_check_kind == TypeCheckKind::kExactCheck ||
3337 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3338 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3339 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3340 !instruction->CanThrowIntoCatchBlock();
3341 SlowPathCodeARM64* type_check_slow_path =
3342 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3343 is_type_check_slow_path_fatal);
3344 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003345
Scott Wakeling97c72b72016-06-24 16:19:36 +01003346 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003347 // Avoid null check if we know obj is not null.
3348 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003349 __ Cbz(obj, &done);
3350 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003351
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003352 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003353 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003354
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003355 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003356 case TypeCheckKind::kExactCheck:
3357 case TypeCheckKind::kArrayCheck: {
3358 __ Cmp(temp, cls);
3359 // Jump to slow path for throwing the exception or doing a
3360 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003361 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003362 break;
3363 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003364
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003365 case TypeCheckKind::kAbstractClassCheck: {
3366 // If the class is abstract, we eagerly fetch the super class of the
3367 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003368 vixl::aarch64::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003369 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003370 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003371 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003372
3373 // If the class reference currently in `temp` is not null, jump
3374 // to the `compare_classes` label to compare it with the checked
3375 // class.
3376 __ Cbnz(temp, &compare_classes);
3377 // Otherwise, jump to the slow path to throw the exception.
3378 //
3379 // But before, move back the object's class into `temp` before
3380 // going into the slow path, as it has been overwritten in the
3381 // meantime.
3382 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003383 GenerateReferenceLoadTwoRegisters(
3384 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003385 __ B(type_check_slow_path->GetEntryLabel());
3386
3387 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003388 __ Cmp(temp, cls);
3389 __ B(ne, &loop);
3390 break;
3391 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003392
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003393 case TypeCheckKind::kClassHierarchyCheck: {
3394 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003395 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003396 __ Bind(&loop);
3397 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003398 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003399
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003400 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003401 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003402
3403 // If the class reference currently in `temp` is not null, jump
3404 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003405 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003406 // Otherwise, jump to the slow path to throw the exception.
3407 //
3408 // But before, move back the object's class into `temp` before
3409 // going into the slow path, as it has been overwritten in the
3410 // meantime.
3411 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003412 GenerateReferenceLoadTwoRegisters(
3413 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003414 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003415 break;
3416 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003417
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003418 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003419 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003420 vixl::aarch64::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003421 __ Cmp(temp, cls);
3422 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003423
3424 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003425 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003426 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003427
3428 // If the component type is not null (i.e. the object is indeed
3429 // an array), jump to label `check_non_primitive_component_type`
3430 // to further check that this component type is not a primitive
3431 // type.
3432 __ Cbnz(temp, &check_non_primitive_component_type);
3433 // Otherwise, jump to the slow path to throw the exception.
3434 //
3435 // But before, move back the object's class into `temp` before
3436 // going into the slow path, as it has been overwritten in the
3437 // meantime.
3438 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003439 GenerateReferenceLoadTwoRegisters(
3440 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003441 __ B(type_check_slow_path->GetEntryLabel());
3442
3443 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003444 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3445 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003446 __ Cbz(temp, &done);
3447 // Same comment as above regarding `temp` and the slow path.
3448 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003449 GenerateReferenceLoadTwoRegisters(
3450 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003451 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003452 break;
3453 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003454
Calin Juravle98893e12015-10-02 21:05:03 +01003455 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003456 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003457 // We always go into the type check slow path for the unresolved
3458 // and interface check cases.
3459 //
3460 // We cannot directly call the CheckCast runtime entry point
3461 // without resorting to a type checking slow path here (i.e. by
3462 // calling InvokeRuntime directly), as it would require us to
3463 // assign fixed registers for the inputs of this HCheckCast
3464 // instruction (following the runtime calling convention), which
3465 // might be cluttered by the potential first read barrier
3466 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003467 //
3468 // TODO: Introduce a new runtime entry point taking the object
3469 // to test (instead of its class) as argument, and let it deal
3470 // with the read barrier issues. This will let us refactor this
3471 // case of the `switch` code as it was previously (with a direct
3472 // call to the runtime not using a type checking slow path).
3473 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003474 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003475 break;
3476 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003477 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003478
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003479 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003480}
3481
Alexandre Rames5319def2014-10-23 10:03:10 +01003482void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3483 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3484 locations->SetOut(Location::ConstantLocation(constant));
3485}
3486
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003487void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003488 // Will be generated at use site.
3489}
3490
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003491void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3492 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3493 locations->SetOut(Location::ConstantLocation(constant));
3494}
3495
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003496void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003497 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003498}
3499
Calin Juravle175dc732015-08-25 15:42:32 +01003500void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3501 // The trampoline uses the same calling convention as dex calling conventions,
3502 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3503 // the method_idx.
3504 HandleInvoke(invoke);
3505}
3506
3507void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3508 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3509}
3510
Alexandre Rames5319def2014-10-23 10:03:10 +01003511void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003512 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003513 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003514}
3515
Alexandre Rames67555f72014-11-18 10:55:16 +00003516void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3517 HandleInvoke(invoke);
3518}
3519
3520void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3521 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003522 LocationSummary* locations = invoke->GetLocations();
3523 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003524 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003525 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003526 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003527
3528 // The register ip1 is required to be used for the hidden argument in
3529 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003530 MacroAssembler* masm = GetVIXLAssembler();
3531 UseScratchRegisterScope scratch_scope(masm);
3532 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003533 scratch_scope.Exclude(ip1);
3534 __ Mov(ip1, invoke->GetDexMethodIndex());
3535
Alexandre Rames67555f72014-11-18 10:55:16 +00003536 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003537 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003538 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003539 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003540 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003541 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003542 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003543 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003544 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003545 // Instead of simply (possibly) unpoisoning `temp` here, we should
3546 // emit a read barrier for the previous class reference load.
3547 // However this is not required in practice, as this is an
3548 // intermediate/temporary reference and because the current
3549 // concurrent copying collector keeps the from-space memory
3550 // intact/accessible until the end of the marking phase (the
3551 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003552 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
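  // Load the ImTable pointer from the receiver's class (mirror::Class::ImtPtrOffset).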
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003553 __ Ldr(temp,
3554 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3555 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003556 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00003557 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003558 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003559 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003560 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003561 // lr();
3562 __ Blr(lr);
3563 DCHECK(!codegen_->IsLeafMethod());
3564 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3565}
3566
3567void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
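  // Recognized intrinsics set up their own locations; only fall through to the
  // generic invoke handling when the call is not an intrinsic.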
Andreas Gampe878d58c2015-01-15 23:24:00 -08003568 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3569 if (intrinsic.TryDispatch(invoke)) {
3570 return;
3571 }
3572
Alexandre Rames67555f72014-11-18 10:55:16 +00003573 HandleInvoke(invoke);
3574}
3575
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003576void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003577 // Explicit clinit checks triggered by static invokes must have been pruned by
3578 // art::PrepareForRegisterAllocation.
3579 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003580
Andreas Gampe878d58c2015-01-15 23:24:00 -08003581 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3582 if (intrinsic.TryDispatch(invoke)) {
3583 return;
3584 }
3585
Alexandre Rames67555f72014-11-18 10:55:16 +00003586 HandleInvoke(invoke);
3587}
3588
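// Emits the intrinsic implementation for `invoke` if it was marked as intrinsified
// during the locations-building phase; returns whether intrinsic code was generated.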
Andreas Gampe878d58c2015-01-15 23:24:00 -08003589static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3590 if (invoke->GetLocations()->Intrinsified()) {
3591 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3592 intrinsic.Dispatch(invoke);
3593 return true;
3594 }
3595 return false;
3596}
3597
Vladimir Markodc151b22015-10-15 18:02:30 +01003598HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3599 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3600 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003601 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003602 return desired_dispatch_info;
3603}
3604
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003605void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003606 // For better instruction scheduling we load the direct code pointer before the method pointer.
3607 bool direct_code_loaded = false;
3608 switch (invoke->GetCodePtrLocation()) {
3609 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3610 // LR = code address from literal pool with link-time patch.
3611 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3612 direct_code_loaded = true;
3613 break;
3614 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3615 // LR = invoke->GetDirectCodePtr();
3616 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3617 direct_code_loaded = true;
3618 break;
3619 default:
3620 break;
3621 }
3622
Andreas Gampe878d58c2015-01-15 23:24:00 -08003623 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003624 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3625 switch (invoke->GetMethodLoadKind()) {
3626 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3627 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003628 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003629 break;
3630 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003631 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003632 break;
3633 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3634 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003635 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003636 break;
3637 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3638 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003639 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003640 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3641 break;
3642 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3643 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003644 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3645 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003646 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003647 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003648 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003649 __ Bind(adrp_label);
3650 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003651 }
Vladimir Marko58155012015-08-19 12:49:41 +00003652 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003653 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003654 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003655 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003656 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003657 __ Bind(ldr_label);
3658 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003659 }
Vladimir Marko58155012015-08-19 12:49:41 +00003660 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003661 }
Vladimir Marko58155012015-08-19 12:49:41 +00003662 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003663 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003664 Register reg = XRegisterFrom(temp);
3665 Register method_reg;
3666 if (current_method.IsRegister()) {
3667 method_reg = XRegisterFrom(current_method);
3668 } else {
3669 DCHECK(invoke->GetLocations()->Intrinsified());
3670 DCHECK(!current_method.IsValid());
3671 method_reg = reg;
3672 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3673 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003674
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003675 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003676 __ Ldr(reg.X(),
3677 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07003678 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003679 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003680 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3681 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003682 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3683 break;
3684 }
3685 }
3686
3687 switch (invoke->GetCodePtrLocation()) {
3688 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3689 __ Bl(&frame_entry_label_);
3690 break;
3691 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3692 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003693 vixl::aarch64::Label* label = &relative_call_patches_.back().label;
3694 SingleEmissionCheckScope guard(GetVIXLAssembler());
Alexandre Rames6dc01742015-11-12 14:44:19 +00003695 __ Bind(label);
3696 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003697 break;
3698 }
3699 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3700 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3701 // LR prepared above for better instruction scheduling.
3702 DCHECK(direct_code_loaded);
3703 // lr()
3704 __ Blr(lr);
3705 break;
3706 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3707 // LR = callee_method->entry_point_from_quick_compiled_code_;
3708 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003709 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07003710 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003711 // lr()
3712 __ Blr(lr);
3713 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003714 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003715
Andreas Gampe878d58c2015-01-15 23:24:00 -08003716 DCHECK(!IsLeafMethod());
3717}
3718
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003719void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003720 // Use the calling convention instead of the location of the receiver, as
3721 // intrinsics may have put the receiver in a different register. In the intrinsics
3722 // slow path, the arguments have been moved to the right place, so here we are
3723 // guaranteed that the receiver is the first register of the calling convention.
3724 InvokeDexCallingConvention calling_convention;
3725 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003726 Register temp = XRegisterFrom(temp_in);
3727 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3728 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3729 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003730 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003731
3732 BlockPoolsScope block_pools(GetVIXLAssembler());
3733
3734 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003735 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003736 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003737 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003738 // Instead of simply (possibly) unpoisoning `temp` here, we should
3739 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003740 // However this is not required in practice, as this is an intermediate/temporary reference and because the current
3741 // concurrent copying collector keeps the from-space memory
3742 // intact/accessible until the end of the marking phase (the
3743 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003744 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3745 // temp = temp->GetMethodAt(method_offset);
3746 __ Ldr(temp, MemOperand(temp, method_offset));
3747 // lr = temp->GetEntryPoint();
3748 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3749 // lr();
3750 __ Blr(lr);
3751}
3752
Scott Wakeling97c72b72016-06-24 16:19:36 +01003753vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
3754 const DexFile& dex_file,
3755 uint32_t string_index,
3756 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003757 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3758}
3759
Scott Wakeling97c72b72016-06-24 16:19:36 +01003760vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
3761 const DexFile& dex_file,
3762 uint32_t type_index,
3763 vixl::aarch64::Label* adrp_label) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003764 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3765}
3766
Scott Wakeling97c72b72016-06-24 16:19:36 +01003767vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
3768 const DexFile& dex_file,
3769 uint32_t element_offset,
3770 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003771 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3772}
3773
Scott Wakeling97c72b72016-06-24 16:19:36 +01003774vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
3775 const DexFile& dex_file,
3776 uint32_t offset_or_index,
3777 vixl::aarch64::Label* adrp_label,
3778 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003779 // Add a patch entry and return the label.
3780 patches->emplace_back(dex_file, offset_or_index);
3781 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003782 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003783 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3784 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3785 return label;
3786}
3787
Scott Wakeling97c72b72016-06-24 16:19:36 +01003788vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003789 const DexFile& dex_file, uint32_t string_index) {
3790 return boot_image_string_patches_.GetOrCreate(
3791 StringReference(&dex_file, string_index),
3792 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3793}
3794
Scott Wakeling97c72b72016-06-24 16:19:36 +01003795vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003796 const DexFile& dex_file, uint32_t type_index) {
3797 return boot_image_type_patches_.GetOrCreate(
3798 TypeReference(&dex_file, type_index),
3799 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3800}
3801
Scott Wakeling97c72b72016-06-24 16:19:36 +01003802vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
3803 uint64_t address) {
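  // When patch information is requested, keep boot image addresses in a dedicated
  // map so EmitLinkerPatches can record their positions; otherwise reuse the
  // generic uint32 literal pool.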
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003804 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3805 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3806 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3807}
3808
Scott Wakeling97c72b72016-06-24 16:19:36 +01003809vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(
3810 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003811 return DeduplicateUint64Literal(address);
3812}
3813
Vladimir Marko58155012015-08-19 12:49:41 +00003814void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3815 DCHECK(linker_patches->empty());
3816 size_t size =
3817 method_patches_.size() +
3818 call_patches_.size() +
3819 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003820 pc_relative_dex_cache_patches_.size() +
3821 boot_image_string_patches_.size() +
3822 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003823 boot_image_type_patches_.size() +
3824 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003825 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003826 linker_patches->reserve(size);
3827 for (const auto& entry : method_patches_) {
3828 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003829 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3830 linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003831 target_method.dex_file,
3832 target_method.dex_method_index));
3833 }
3834 for (const auto& entry : call_patches_) {
3835 const MethodReference& target_method = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003836 vixl::aarch64::Literal<uint64_t>* literal = entry.second;
3837 linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
Vladimir Marko58155012015-08-19 12:49:41 +00003838 target_method.dex_file,
3839 target_method.dex_method_index));
3840 }
Scott Wakeling97c72b72016-06-24 16:19:36 +01003841 for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
3842 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003843 info.target_method.dex_file,
3844 info.target_method.dex_method_index));
3845 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003846 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003847 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00003848 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003849 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003850 info.offset_or_index));
3851 }
3852 for (const auto& entry : boot_image_string_patches_) {
3853 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003854 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3855 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003856 target_string.dex_file,
3857 target_string.string_index));
3858 }
3859 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003860 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003861 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003862 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003863 info.offset_or_index));
3864 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003865 for (const auto& entry : boot_image_type_patches_) {
3866 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003867 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3868 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003869 target_type.dex_file,
3870 target_type.type_index));
3871 }
3872 for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003873 linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003874 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003875 info.pc_insn_label->GetLocation(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003876 info.offset_or_index));
3877 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003878 for (const auto& entry : boot_image_address_patches_) {
3879 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003880 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
3881 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003882 }
3883}
3884
Scott Wakeling97c72b72016-06-24 16:19:36 +01003885vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003886 Uint32ToLiteralMap* map) {
3887 return map->GetOrCreate(
3888 value,
3889 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3890}
3891
Scott Wakeling97c72b72016-06-24 16:19:36 +01003892vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003893 return uint64_literals_.GetOrCreate(
3894 value,
3895 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003896}
3897
Scott Wakeling97c72b72016-06-24 16:19:36 +01003898vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003899 MethodReference target_method,
3900 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003901 return map->GetOrCreate(
3902 target_method,
3903 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003904}
3905
Scott Wakeling97c72b72016-06-24 16:19:36 +01003906vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003907 MethodReference target_method) {
3908 return DeduplicateMethodLiteral(target_method, &method_patches_);
3909}
3910
Scott Wakeling97c72b72016-06-24 16:19:36 +01003911vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00003912 MethodReference target_method) {
3913 return DeduplicateMethodLiteral(target_method, &call_patches_);
3914}
3915
3916
Andreas Gampe878d58c2015-01-15 23:24:00 -08003917void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003918 // Explicit clinit checks triggered by static invokes must have been pruned by
3919 // art::PrepareForRegisterAllocation.
3920 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003921
Andreas Gampe878d58c2015-01-15 23:24:00 -08003922 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3923 return;
3924 }
3925
Alexandre Ramesd921d642015-04-16 15:07:16 +01003926 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003927 LocationSummary* locations = invoke->GetLocations();
3928 codegen_->GenerateStaticOrDirectCall(
3929 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003930 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003931}
3932
3933void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003934 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3935 return;
3936 }
3937
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003938 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003939 DCHECK(!codegen_->IsLeafMethod());
3940 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3941}
3942
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003943HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
3944 HLoadClass::LoadKind desired_class_load_kind) {
3945 if (kEmitCompilerReadBarrier) {
3946 switch (desired_class_load_kind) {
3947 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3948 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3949 case HLoadClass::LoadKind::kBootImageAddress:
3950 // TODO: Implement for read barrier.
3951 return HLoadClass::LoadKind::kDexCacheViaMethod;
3952 default:
3953 break;
3954 }
3955 }
3956 switch (desired_class_load_kind) {
3957 case HLoadClass::LoadKind::kReferrersClass:
3958 break;
3959 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3960 DCHECK(!GetCompilerOptions().GetCompilePic());
3961 break;
3962 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3963 DCHECK(GetCompilerOptions().GetCompilePic());
3964 break;
3965 case HLoadClass::LoadKind::kBootImageAddress:
3966 break;
3967 case HLoadClass::LoadKind::kDexCacheAddress:
3968 DCHECK(Runtime::Current()->UseJitCompilation());
3969 break;
3970 case HLoadClass::LoadKind::kDexCachePcRelative:
3971 DCHECK(!Runtime::Current()->UseJitCompilation());
3972 break;
3973 case HLoadClass::LoadKind::kDexCacheViaMethod:
3974 break;
3975 }
3976 return desired_class_load_kind;
3977}
3978
Alexandre Rames67555f72014-11-18 10:55:16 +00003979void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003980 if (cls->NeedsAccessCheck()) {
3981 InvokeRuntimeCallingConvention calling_convention;
3982 CodeGenerator::CreateLoadClassLocationSummary(
3983 cls,
3984 LocationFrom(calling_convention.GetRegisterAt(0)),
Scott Wakeling97c72b72016-06-24 16:19:36 +01003985 LocationFrom(vixl::aarch64::x0),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003986 /* code_generator_supports_read_barrier */ true);
3987 return;
3988 }
3989
3990 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
3991 ? LocationSummary::kCallOnSlowPath
3992 : LocationSummary::kNoCall;
3993 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3994 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3995 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3996 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3997 locations->SetInAt(0, Location::RequiresRegister());
3998 }
3999 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004000}
4001
4002void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01004003 if (cls->NeedsAccessCheck()) {
4004 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004005 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004006 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01004007 return;
4008 }
4009
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004010 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004011 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004012
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004013 bool generate_null_check = false;
4014 switch (cls->GetLoadKind()) {
4015 case HLoadClass::LoadKind::kReferrersClass: {
4016 DCHECK(!cls->CanCallRuntime());
4017 DCHECK(!cls->MustGenerateClinitCheck());
4018 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4019 Register current_method = InputRegisterAt(cls, 0);
4020 GenerateGcRootFieldLoad(
4021 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4022 break;
4023 }
4024 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4025 DCHECK(!kEmitCompilerReadBarrier);
4026 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4027 cls->GetTypeIndex()));
4028 break;
4029 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
4030 DCHECK(!kEmitCompilerReadBarrier);
4031 // Add ADRP with its PC-relative type patch.
4032 const DexFile& dex_file = cls->GetDexFile();
4033 uint32_t type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004034 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004035 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004036 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004037 __ Bind(adrp_label);
4038 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004039 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004040 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004041 vixl::aarch64::Label* add_label =
4042 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004043 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004044 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004045 __ Bind(add_label);
4046 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004047 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004048 break;
4049 }
4050 case HLoadClass::LoadKind::kBootImageAddress: {
4051 DCHECK(!kEmitCompilerReadBarrier);
4052 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
4053 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4054 break;
4055 }
4056 case HLoadClass::LoadKind::kDexCacheAddress: {
4057 DCHECK_NE(cls->GetAddress(), 0u);
4058 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4059 // that gives a 16KiB range. To try and reduce the number of literals if we load
4060 // multiple types, simply split the dex cache address into a 16KiB-aligned base
4061 // loaded from a literal and the remaining offset embedded in the load.
4062 static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
4063 DCHECK_ALIGNED(cls->GetAddress(), 4u);
4064 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4065 uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4066 uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
4067 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4068 // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
4069 GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset);
4070 generate_null_check = !cls->IsInDexCache();
4071 break;
4072 }
4073 case HLoadClass::LoadKind::kDexCachePcRelative: {
4074 // Add ADRP with its PC-relative DexCache access patch.
4075 const DexFile& dex_file = cls->GetDexFile();
4076 uint32_t element_offset = cls->GetDexCacheElementOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004077 vixl::aarch64::Label* adrp_label =
4078 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004079 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004080 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004081 __ Bind(adrp_label);
4082 __ adrp(out.X(), /* offset placeholder */ 0);
4083 }
4084 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004085 vixl::aarch64::Label* ldr_label =
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004086 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4087 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4088 GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4089 generate_null_check = !cls->IsInDexCache();
4090 break;
4091 }
4092 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4093 MemberOffset resolved_types_offset =
4094 ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
4095 // /* GcRoot<mirror::Class>[] */ out =
4096 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4097 Register current_method = InputRegisterAt(cls, 0);
4098 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
4099 // /* GcRoot<mirror::Class> */ out = out[type_index]
4100 GenerateGcRootFieldLoad(
4101 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
4102 generate_null_check = !cls->IsInDexCache();
4103 break;
4104 }
4105 }
4106
4107 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4108 DCHECK(cls->CanCallRuntime());
4109 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4110 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4111 codegen_->AddSlowPath(slow_path);
4112 if (generate_null_check) {
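      // A null result means the type has not been resolved into the dex cache yet;
      // call into the runtime through the slow path to resolve it.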
4113 __ Cbz(out, slow_path->GetEntryLabel());
4114 }
4115 if (cls->MustGenerateClinitCheck()) {
4116 GenerateClassInitializationCheck(slow_path, out);
4117 } else {
4118 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004119 }
4120 }
4121}
4122
David Brazdilcb1c0552015-08-04 16:22:25 +01004123static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004124 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004125}
4126
Alexandre Rames67555f72014-11-18 10:55:16 +00004127void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4128 LocationSummary* locations =
4129 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4130 locations->SetOut(Location::RequiresRegister());
4131}
4132
4133void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004134 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4135}
4136
4137void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4138 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4139}
4140
4141void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4142 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004143}
4144
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004145HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4146 HLoadString::LoadKind desired_string_load_kind) {
4147 if (kEmitCompilerReadBarrier) {
4148 switch (desired_string_load_kind) {
4149 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4150 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4151 case HLoadString::LoadKind::kBootImageAddress:
4152 // TODO: Implement for read barrier.
4153 return HLoadString::LoadKind::kDexCacheViaMethod;
4154 default:
4155 break;
4156 }
4157 }
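  // The remaining checks only assert that the requested kind is compatible with the
  // current compiler configuration (PIC vs. non-PIC, JIT vs. AOT compilation).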
4158 switch (desired_string_load_kind) {
4159 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4160 DCHECK(!GetCompilerOptions().GetCompilePic());
4161 break;
4162 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4163 DCHECK(GetCompilerOptions().GetCompilePic());
4164 break;
4165 case HLoadString::LoadKind::kBootImageAddress:
4166 break;
4167 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004168 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004169 break;
4170 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004171 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004172 break;
4173 case HLoadString::LoadKind::kDexCacheViaMethod:
4174 break;
4175 }
4176 return desired_string_load_kind;
4177}
4178
Alexandre Rames67555f72014-11-18 10:55:16 +00004179void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004180 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004181 ? LocationSummary::kCallOnSlowPath
4182 : LocationSummary::kNoCall;
4183 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004184 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4185 locations->SetInAt(0, Location::RequiresRegister());
4186 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004187 locations->SetOut(Location::RequiresRegister());
4188}
4189
4190void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004191 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004192
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004193 switch (load->GetLoadKind()) {
4194 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4195 DCHECK(!kEmitCompilerReadBarrier);
4196 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4197 load->GetStringIndex()));
4198 return; // No dex cache slow path.
4199 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4200 DCHECK(!kEmitCompilerReadBarrier);
4201 // Add ADRP with its PC-relative String patch.
4202 const DexFile& dex_file = load->GetDexFile();
4203 uint32_t string_index = load->GetStringIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004204 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004205 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004206 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004207 __ Bind(adrp_label);
4208 __ adrp(out.X(), /* offset placeholder */ 0);
4209 }
4210 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004211 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004212 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4213 {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004214 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004215 __ Bind(add_label);
4216 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4217 }
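      // As with HLoadClass above, the linker patches the ADRP/ADD pair so that it
      // materializes the String object's boot image address.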
4218 return; // No dex cache slow path.
4219 }
4220 case HLoadString::LoadKind::kBootImageAddress: {
4221 DCHECK(!kEmitCompilerReadBarrier);
4222 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4223 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4224 return; // No dex cache slow path.
4225 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004226 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004227 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004228 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004229
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004230 // TODO: Re-add the compiler code to do string dex cache lookup again.
4231 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4232 codegen_->AddSlowPath(slow_path);
4233 __ B(slow_path->GetEntryLabel());
4234 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004235}
4236
Alexandre Rames5319def2014-10-23 10:03:10 +01004237void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4238 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4239 locations->SetOut(Location::ConstantLocation(constant));
4240}
4241
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004242void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004243 // Will be generated at use site.
4244}
4245
Alexandre Rames67555f72014-11-18 10:55:16 +00004246void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4247 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004248 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004249 InvokeRuntimeCallingConvention calling_convention;
4250 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4251}
4252
4253void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004254  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
4255 instruction,
4256 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004257 if (instruction->IsEnter()) {
4258 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4259 } else {
4260 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4261 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004262}
4263
Alexandre Rames42d641b2014-10-27 14:00:51 +00004264void LocationsBuilderARM64::VisitMul(HMul* mul) {
4265 LocationSummary* locations =
4266 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4267 switch (mul->GetResultType()) {
4268 case Primitive::kPrimInt:
4269 case Primitive::kPrimLong:
4270 locations->SetInAt(0, Location::RequiresRegister());
4271 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004272 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004273 break;
4274
4275 case Primitive::kPrimFloat:
4276 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004277 locations->SetInAt(0, Location::RequiresFpuRegister());
4278 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004279 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004280 break;
4281
4282 default:
4283 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4284 }
4285}
4286
4287void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4288 switch (mul->GetResultType()) {
4289 case Primitive::kPrimInt:
4290 case Primitive::kPrimLong:
4291 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4292 break;
4293
4294 case Primitive::kPrimFloat:
4295 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004296 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004297 break;
4298
4299 default:
4300 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4301 }
4302}
4303
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004304void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4305 LocationSummary* locations =
4306 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4307 switch (neg->GetResultType()) {
4308 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004309 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004310 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004311 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004312 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004313
4314 case Primitive::kPrimFloat:
4315 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004316 locations->SetInAt(0, Location::RequiresFpuRegister());
4317 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004318 break;
4319
4320 default:
4321 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4322 }
4323}
4324
4325void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4326 switch (neg->GetResultType()) {
4327 case Primitive::kPrimInt:
4328 case Primitive::kPrimLong:
4329 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4330 break;
4331
4332 case Primitive::kPrimFloat:
4333 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004334 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004335 break;
4336
4337 default:
4338 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4339 }
4340}
4341
4342void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4343 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004344 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004345 InvokeRuntimeCallingConvention calling_convention;
4346 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004347 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004348 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004349 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004350}
4351
4352void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4353 LocationSummary* locations = instruction->GetLocations();
4354 InvokeRuntimeCallingConvention calling_convention;
4355 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4356 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004357 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004358  // Note: if heap poisoning is enabled, the entry point takes care
4359 // of poisoning the reference.
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004360 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Mathieu Chartiere401d142015-04-22 13:56:20 -07004361 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004362}
4363
Alexandre Rames5319def2014-10-23 10:03:10 +01004364void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4365 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004366 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004367 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004368 if (instruction->IsStringAlloc()) {
4369 locations->AddTemp(LocationFrom(kArtMethodRegister));
4370 } else {
4371 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4372 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4373 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004374 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4375}
4376
4377void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004378  // Note: if heap poisoning is enabled, the entry point takes care
4379 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004380 if (instruction->IsStringAlloc()) {
4381 // String is allocated through StringFactory. Call NewEmptyString entry point.
4382 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004383 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004384 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
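    // `temp` now holds the ArtMethod* stored in the pNewEmptyString entrypoint slot;
    // load its quick-compiled code entry point and call it.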
4385 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4386 __ Blr(lr);
4387 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4388 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004389 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
David Brazdil6de19382016-01-08 17:37:10 +00004390 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4391 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004392}
4393
4394void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4395 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004396 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004397 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004398}
4399
4400void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004401 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004402 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004403 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004404 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004405 break;
4406
4407 default:
4408 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4409 }
4410}
4411
David Brazdil66d126e2015-04-03 16:02:44 +01004412void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4413 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4414 locations->SetInAt(0, Location::RequiresRegister());
4415 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4416}
4417
4418void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004419 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01004420}
4421
Alexandre Rames5319def2014-10-23 10:03:10 +01004422void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004423 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4424 ? LocationSummary::kCallOnSlowPath
4425 : LocationSummary::kNoCall;
4426 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01004427 locations->SetInAt(0, Location::RequiresRegister());
4428 if (instruction->HasUses()) {
4429 locations->SetOut(Location::SameAsFirstInput());
4430 }
4431}
4432
Calin Juravle2ae48182016-03-16 14:05:09 +00004433void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4434 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004435 return;
4436 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004437
Alexandre Ramesd921d642015-04-16 15:07:16 +01004438 BlockPoolsScope block_pools(GetVIXLAssembler());
4439 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004440 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
Calin Juravle2ae48182016-03-16 14:05:09 +00004441 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004442}
4443
Calin Juravle2ae48182016-03-16 14:05:09 +00004444void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004445 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004446 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004447
4448 LocationSummary* locations = instruction->GetLocations();
4449 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004450
4451 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004452}
4453
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004454void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004455 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004456}
4457
Alexandre Rames67555f72014-11-18 10:55:16 +00004458void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4459 HandleBinaryOp(instruction);
4460}
4461
4462void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4463 HandleBinaryOp(instruction);
4464}
4465
Alexandre Rames3e69f162014-12-10 10:36:50 +00004466void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4467 LOG(FATAL) << "Unreachable";
4468}
4469
4470void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4471 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4472}
4473
Alexandre Rames5319def2014-10-23 10:03:10 +01004474void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4475 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4476 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4477 if (location.IsStackSlot()) {
4478 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4479 } else if (location.IsDoubleStackSlot()) {
4480 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4481 }
4482 locations->SetOut(location);
4483}
4484
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004485void InstructionCodeGeneratorARM64::VisitParameterValue(
4486 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004487 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004488}
4489
4490void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4491 LocationSummary* locations =
4492 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004493 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004494}
4495
4496void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4497 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4498 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004499}
4500
4501void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4502 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004503 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004504 locations->SetInAt(i, Location::Any());
4505 }
4506 locations->SetOut(Location::Any());
4507}
4508
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004509void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004510 LOG(FATAL) << "Unreachable";
4511}
4512
Serban Constantinescu02164b32014-11-13 14:05:07 +00004513void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004514 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004515 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004516 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
4517 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004518 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4519
4520 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004521 case Primitive::kPrimInt:
4522 case Primitive::kPrimLong:
4523 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004524 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004525 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4526 break;
4527
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004528 case Primitive::kPrimFloat:
4529 case Primitive::kPrimDouble: {
4530 InvokeRuntimeCallingConvention calling_convention;
4531 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4532 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4533 locations->SetOut(calling_convention.GetReturnLocation(type));
4534
4535 break;
4536 }
4537
Serban Constantinescu02164b32014-11-13 14:05:07 +00004538 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004539 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004540 }
4541}
4542
4543void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4544 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004545
Serban Constantinescu02164b32014-11-13 14:05:07 +00004546 switch (type) {
4547 case Primitive::kPrimInt:
4548 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004549 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004550 break;
4551 }
4552
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004553 case Primitive::kPrimFloat:
4554 case Primitive::kPrimDouble: {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004555 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
4556 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004557 if (type == Primitive::kPrimFloat) {
4558 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4559 } else {
4560 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4561 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004562 break;
4563 }
4564
Serban Constantinescu02164b32014-11-13 14:05:07 +00004565 default:
4566 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004567 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004568 }
4569}
4570
Calin Juravle27df7582015-04-17 19:12:31 +01004571void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4572 memory_barrier->SetLocations(nullptr);
4573}
4574
4575void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004576 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004577}
4578
Alexandre Rames5319def2014-10-23 10:03:10 +01004579void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4580 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4581 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004582 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004583}
4584
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004585void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004586 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004587}
4588
4589void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4590 instruction->SetLocations(nullptr);
4591}
4592
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004593void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004594 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004595}
4596
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004597void LocationsBuilderARM64::VisitRor(HRor* ror) {
4598 HandleBinaryOp(ror);
4599}
4600
4601void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4602 HandleBinaryOp(ror);
4603}
4604
Serban Constantinescu02164b32014-11-13 14:05:07 +00004605void LocationsBuilderARM64::VisitShl(HShl* shl) {
4606 HandleShift(shl);
4607}
4608
4609void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4610 HandleShift(shl);
4611}
4612
4613void LocationsBuilderARM64::VisitShr(HShr* shr) {
4614 HandleShift(shr);
4615}
4616
4617void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4618 HandleShift(shr);
4619}
4620
Alexandre Rames5319def2014-10-23 10:03:10 +01004621void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004622 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004623}
4624
4625void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004626 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004627}
4628
Alexandre Rames67555f72014-11-18 10:55:16 +00004629void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004630 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004631}
4632
4633void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004634 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004635}
4636
4637void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004638 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004639}
4640
Alexandre Rames67555f72014-11-18 10:55:16 +00004641void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004642 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004643}
4644
Calin Juravlee460d1d2015-09-29 04:52:17 +01004645void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4646 HUnresolvedInstanceFieldGet* instruction) {
4647 FieldAccessCallingConventionARM64 calling_convention;
4648 codegen_->CreateUnresolvedFieldLocationSummary(
4649 instruction, instruction->GetFieldType(), calling_convention);
4650}
4651
4652void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4653 HUnresolvedInstanceFieldGet* instruction) {
4654 FieldAccessCallingConventionARM64 calling_convention;
4655 codegen_->GenerateUnresolvedFieldAccess(instruction,
4656 instruction->GetFieldType(),
4657 instruction->GetFieldIndex(),
4658 instruction->GetDexPc(),
4659 calling_convention);
4660}
4661
4662void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4663 HUnresolvedInstanceFieldSet* instruction) {
4664 FieldAccessCallingConventionARM64 calling_convention;
4665 codegen_->CreateUnresolvedFieldLocationSummary(
4666 instruction, instruction->GetFieldType(), calling_convention);
4667}
4668
4669void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4670 HUnresolvedInstanceFieldSet* instruction) {
4671 FieldAccessCallingConventionARM64 calling_convention;
4672 codegen_->GenerateUnresolvedFieldAccess(instruction,
4673 instruction->GetFieldType(),
4674 instruction->GetFieldIndex(),
4675 instruction->GetDexPc(),
4676 calling_convention);
4677}
4678
4679void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4680 HUnresolvedStaticFieldGet* instruction) {
4681 FieldAccessCallingConventionARM64 calling_convention;
4682 codegen_->CreateUnresolvedFieldLocationSummary(
4683 instruction, instruction->GetFieldType(), calling_convention);
4684}
4685
4686void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4687 HUnresolvedStaticFieldGet* instruction) {
4688 FieldAccessCallingConventionARM64 calling_convention;
4689 codegen_->GenerateUnresolvedFieldAccess(instruction,
4690 instruction->GetFieldType(),
4691 instruction->GetFieldIndex(),
4692 instruction->GetDexPc(),
4693 calling_convention);
4694}
4695
4696void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4697 HUnresolvedStaticFieldSet* instruction) {
4698 FieldAccessCallingConventionARM64 calling_convention;
4699 codegen_->CreateUnresolvedFieldLocationSummary(
4700 instruction, instruction->GetFieldType(), calling_convention);
4701}
4702
4703void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4704 HUnresolvedStaticFieldSet* instruction) {
4705 FieldAccessCallingConventionARM64 calling_convention;
4706 codegen_->GenerateUnresolvedFieldAccess(instruction,
4707 instruction->GetFieldType(),
4708 instruction->GetFieldIndex(),
4709 instruction->GetDexPc(),
4710 calling_convention);
4711}
4712
Alexandre Rames5319def2014-10-23 10:03:10 +01004713void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
4714 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4715}
4716
4717void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004718 HBasicBlock* block = instruction->GetBlock();
4719 if (block->GetLoopInformation() != nullptr) {
4720 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4721 // The back edge will generate the suspend check.
4722 return;
4723 }
4724 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4725 // The goto will generate the suspend check.
4726 return;
4727 }
4728 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004729}
4730
Alexandre Rames67555f72014-11-18 10:55:16 +00004731void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4732 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004733 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004734 InvokeRuntimeCallingConvention calling_convention;
4735 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4736}
4737
4738void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004739 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004740 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004741}
4742
4743void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4744 LocationSummary* locations =
4745 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4746 Primitive::Type input_type = conversion->GetInputType();
4747 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004748 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004749 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4750 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4751 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4752 }
4753
Alexandre Rames542361f2015-01-29 16:57:31 +00004754 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004755 locations->SetInAt(0, Location::RequiresFpuRegister());
4756 } else {
4757 locations->SetInAt(0, Location::RequiresRegister());
4758 }
4759
Alexandre Rames542361f2015-01-29 16:57:31 +00004760 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004761 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4762 } else {
4763 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4764 }
4765}
4766
4767void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4768 Primitive::Type result_type = conversion->GetResultType();
4769 Primitive::Type input_type = conversion->GetInputType();
4770
4771 DCHECK_NE(input_type, result_type);
4772
Alexandre Rames542361f2015-01-29 16:57:31 +00004773 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004774 int result_size = Primitive::ComponentSize(result_type);
4775 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004776 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004777 Register output = OutputRegister(conversion);
4778 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00004779 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004780 // 'int' values are used directly as W registers, discarding the top
4781 // bits, so we don't need to sign-extend and can just perform a move.
4782 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
4783 // top 32 bits of the target register. We theoretically could leave those
4784 // bits unchanged, but we would have to make sure that no code uses a
4785      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
4786 // zero.
4787 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004788 } else if (result_type == Primitive::kPrimChar ||
4789 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4790 __ Ubfx(output,
4791 output.IsX() ? source.X() : source.W(),
4792 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
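      // Ubfx zero-extends the low 16 bits, matching the unsigned semantics of the
      // Java char type.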
Alexandre Rames67555f72014-11-18 10:55:16 +00004793 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00004794 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004795 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004796 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004797 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004798 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004799 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4800 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
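    // Fcvtzs rounds toward zero and saturates on overflow (NaN converts to 0), which
    // matches the Java semantics of floating-point to integral conversions.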
Alexandre Rames542361f2015-01-29 16:57:31 +00004801 } else if (Primitive::IsFloatingPointType(result_type) &&
4802 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004803 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4804 } else {
4805 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4806 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004807 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004808}
Alexandre Rames67555f72014-11-18 10:55:16 +00004809
Serban Constantinescu02164b32014-11-13 14:05:07 +00004810void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4811 HandleShift(ushr);
4812}
4813
4814void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4815 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004816}
4817
4818void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4819 HandleBinaryOp(instruction);
4820}
4821
4822void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4823 HandleBinaryOp(instruction);
4824}
4825
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004826void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004827 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004828 LOG(FATAL) << "Unreachable";
4829}
4830
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004831void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004832 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004833 LOG(FATAL) << "Unreachable";
4834}
4835
Mark Mendellfe57faa2015-09-18 09:26:15 -04004836// Simple implementation of packed switch - generate cascaded compare/jumps.
4837void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4838 LocationSummary* locations =
4839 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4840 locations->SetInAt(0, Location::RequiresRegister());
4841}
4842
4843void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4844 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004845 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004846 Register value_reg = InputRegisterAt(switch_instr, 0);
4847 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4848
Zheng Xu3927c8b2015-11-18 17:46:25 +08004849  // Roughly assume a maximum of 16 assembly instructions are generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004850 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08004851  // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph
4852  // to make sure we do not emit the jump table if its Adr target may end up out of range.
4853 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4854 // ranges and emit the tables only as required.
4855  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
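  // With 4-byte A64 instructions this evaluates to 1 MB / 64 B = 16384 HIR instructions.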
Mark Mendellfe57faa2015-09-18 09:26:15 -04004856
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004857 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004858 // Current instruction id is an upper bound of the number of HIRs in the graph.
4859 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4860 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004861 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4862 Register temp = temps.AcquireW();
4863 __ Subs(temp, value_reg, Operand(lower_bound));
4864
Zheng Xu3927c8b2015-11-18 17:46:25 +08004865 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004866 // Jump to successors[0] if value == lower_bound.
4867 __ B(eq, codegen_->GetLabelOf(successors[0]));
4868 int32_t last_index = 0;
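    // Consume the case values two at a time so that a single Subs provides the flags
    // for both conditional branches of each iteration.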
4869 for (; num_entries - last_index > 2; last_index += 2) {
4870 __ Subs(temp, temp, Operand(2));
4871 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4872 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4873 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4874 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4875 }
4876 if (num_entries - last_index == 2) {
4877 // The last missing case_value.
4878 __ Cmp(temp, Operand(1));
4879 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004880 }
4881
4882 // And the default for any other value.
4883 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4884 __ B(codegen_->GetLabelOf(default_block));
4885 }
4886 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01004887 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08004888
4889 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4890
4891    // The instructions below need at most one of VIXL's scratch (blocked) registers. Since
4892    // there are two of them, we are free to acquire one here.
4893 Register temp_w = temps.AcquireW();
4894 Register index;
4895 // Remove the bias.
4896 if (lower_bound != 0) {
4897 index = temp_w;
4898 __ Sub(index, value_reg, Operand(lower_bound));
4899 } else {
4900 index = value_reg;
4901 }
4902
4903 // Jump to default block if index is out of the range.
4904 __ Cmp(index, Operand(num_entries));
4905 __ B(hs, codegen_->GetLabelOf(default_block));
4906
4907    // In the current VIXL implementation, encoding the immediate value for Adr does not
4908    // require any scratch registers, so we are free to use both VIXL scratch registers to
4909    // reduce register pressure.
4910 Register table_base = temps.AcquireX();
4911    // Compute the address of the jump table.
4912 __ Adr(table_base, jump_table->GetTableStartLabel());
4913 Register jump_offset = temp_w;
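    // Load the jump offset: each table entry is a 32-bit offset from table_base to its
    // target block, stored at table_base + index * 4.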
4914 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
4915
4916    // Jump to the target block by branching to table_base (PC-relative) + offset.
4917 Register target_address = table_base;
4918 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
4919 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04004920 }
4921}
4922
Roland Levillain44015862016-01-22 11:47:17 +00004923void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
4924 Location out,
4925 uint32_t offset,
4926 Location maybe_temp) {
4927 Primitive::Type type = Primitive::kPrimNot;
4928 Register out_reg = RegisterFrom(out, type);
4929 if (kEmitCompilerReadBarrier) {
4930 Register temp_reg = RegisterFrom(maybe_temp, type);
4931 if (kUseBakerReadBarrier) {
4932 // Load with fast path based Baker's read barrier.
4933 // /* HeapReference<Object> */ out = *(out + offset)
4934 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4935 out,
4936 out_reg,
4937 offset,
4938 temp_reg,
4939 /* needs_null_check */ false,
4940 /* use_load_acquire */ false);
4941 } else {
4942 // Load with slow path based read barrier.
4943 // Save the value of `out` into `maybe_temp` before overwriting it
4944 // in the following move operation, as we will need it for the
4945 // read barrier below.
4946 __ Mov(temp_reg, out_reg);
4947 // /* HeapReference<Object> */ out = *(out + offset)
4948 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4949 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
4950 }
4951 } else {
4952 // Plain load with no read barrier.
4953 // /* HeapReference<Object> */ out = *(out + offset)
4954 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4955 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4956 }
4957}
4958
4959void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
4960 Location out,
4961 Location obj,
4962 uint32_t offset,
4963 Location maybe_temp) {
4964 Primitive::Type type = Primitive::kPrimNot;
4965 Register out_reg = RegisterFrom(out, type);
4966 Register obj_reg = RegisterFrom(obj, type);
4967 if (kEmitCompilerReadBarrier) {
4968 if (kUseBakerReadBarrier) {
4969 // Load with fast path based Baker's read barrier.
4970 Register temp_reg = RegisterFrom(maybe_temp, type);
4971 // /* HeapReference<Object> */ out = *(obj + offset)
4972 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4973 out,
4974 obj_reg,
4975 offset,
4976 temp_reg,
4977 /* needs_null_check */ false,
4978 /* use_load_acquire */ false);
4979 } else {
4980 // Load with slow path based read barrier.
4981 // /* HeapReference<Object> */ out = *(obj + offset)
4982 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4983 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
4984 }
4985 } else {
4986 // Plain load with no read barrier.
4987 // /* HeapReference<Object> */ out = *(obj + offset)
4988 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
4989 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4990 }
4991}
4992
4993void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
4994 Location root,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004995 Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004996 uint32_t offset,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004997 vixl::aarch64::Label* fixup_label) {
Roland Levillain44015862016-01-22 11:47:17 +00004998 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
4999 if (kEmitCompilerReadBarrier) {
5000 if (kUseBakerReadBarrier) {
5001 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
5002      // Baker's read barriers are used:
5003 //
5004 // root = obj.field;
5005 // if (Thread::Current()->GetIsGcMarking()) {
5006 // root = ReadBarrier::Mark(root)
5007 // }
5008
5009 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005010 if (fixup_label == nullptr) {
5011 __ Ldr(root_reg, MemOperand(obj, offset));
5012 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005013 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005014 __ Bind(fixup_label);
5015 __ ldr(root_reg, MemOperand(obj, offset));
5016 }
Roland Levillain44015862016-01-22 11:47:17 +00005017 static_assert(
5018 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5019 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5020 "have different sizes.");
5021 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5022 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5023 "have different sizes.");
5024
Vladimir Marko953437b2016-08-24 08:30:46 +00005025 // Slow path marking the GC root `root`.
Roland Levillain44015862016-01-22 11:47:17 +00005026 SlowPathCodeARM64* slow_path =
Roland Levillain02b75802016-07-13 11:54:35 +01005027 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
Roland Levillain44015862016-01-22 11:47:17 +00005028 codegen_->AddSlowPath(slow_path);
5029
5030 MacroAssembler* masm = GetVIXLAssembler();
5031 UseScratchRegisterScope temps(masm);
5032 Register temp = temps.AcquireW();
5033 // temp = Thread::Current()->GetIsGcMarking()
Andreas Gampe542451c2016-07-26 09:02:02 -07005034 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64PointerSize>().Int32Value()));
Roland Levillain44015862016-01-22 11:47:17 +00005035 __ Cbnz(temp, slow_path->GetEntryLabel());
5036 __ Bind(slow_path->GetExitLabel());
5037 } else {
5038 // GC root loaded through a slow path for read barriers other
5039 // than Baker's.
5040 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005041 if (fixup_label == nullptr) {
5042 __ Add(root_reg.X(), obj.X(), offset);
5043 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005044 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005045 __ Bind(fixup_label);
5046 __ add(root_reg.X(), obj.X(), offset);
5047 }
Roland Levillain44015862016-01-22 11:47:17 +00005048 // /* mirror::Object* */ root = root->Read()
5049 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5050 }
5051 } else {
5052 // Plain GC root load with no read barrier.
5053 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005054 if (fixup_label == nullptr) {
5055 __ Ldr(root_reg, MemOperand(obj, offset));
5056 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005057 SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005058 __ Bind(fixup_label);
5059 __ ldr(root_reg, MemOperand(obj, offset));
5060 }
Roland Levillain44015862016-01-22 11:47:17 +00005061 // Note that GC roots are not affected by heap poisoning, thus we
5062 // do not have to unpoison `root_reg` here.
5063 }
5064}
5065
5066void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5067 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005068 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005069 uint32_t offset,
5070 Register temp,
5071 bool needs_null_check,
5072 bool use_load_acquire) {
5073 DCHECK(kEmitCompilerReadBarrier);
5074 DCHECK(kUseBakerReadBarrier);
5075
5076 // /* HeapReference<Object> */ ref = *(obj + offset)
5077 Location no_index = Location::NoLocation();
Roland Levillainbfea3352016-06-23 13:48:47 +01005078 size_t no_scale_factor = 0U;
5079 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5080 ref,
5081 obj,
5082 offset,
5083 no_index,
5084 no_scale_factor,
5085 temp,
5086 needs_null_check,
5087 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005088}
5089
5090void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5091 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005092 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005093 uint32_t data_offset,
5094 Location index,
5095 Register temp,
5096 bool needs_null_check) {
5097 DCHECK(kEmitCompilerReadBarrier);
5098 DCHECK(kUseBakerReadBarrier);
5099
5100 // Array cells are never volatile variables, therefore array loads
5101 // never use Load-Acquire instructions on ARM64.
5102 const bool use_load_acquire = false;
5103
Roland Levillainbfea3352016-06-23 13:48:47 +01005104 static_assert(
5105 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5106 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005107 // /* HeapReference<Object> */ ref =
5108 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005109 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
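  // Heap references are 32-bit (compressed), so the scale factor is 2.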
5110 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5111 ref,
5112 obj,
5113 data_offset,
5114 index,
5115 scale_factor,
5116 temp,
5117 needs_null_check,
5118 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005119}
5120
5121void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5122 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005123 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005124 uint32_t offset,
5125 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005126 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005127 Register temp,
5128 bool needs_null_check,
5129 bool use_load_acquire) {
5130 DCHECK(kEmitCompilerReadBarrier);
5131 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005132 // If we are emitting an array load, we should not be using a
5133 // Load Acquire instruction. In other words:
5134 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5135 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005136
5137 MacroAssembler* masm = GetVIXLAssembler();
5138 UseScratchRegisterScope temps(masm);
5139
5140 // In slow path based read barriers, the read barrier call is
5141 // inserted after the original load. However, in fast path based
5142 // Baker's read barriers, we need to perform the load of
5143 // mirror::Object::monitor_ *before* the original reference load.
5144 // This load-load ordering is required by the read barrier.
5145 // The fast path/slow path (for Baker's algorithm) should look like:
5146 //
  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // `obj` is unchanged by this operation, but its value now depends
  // on `temp`.
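  // Note: the shifted operand is always zero: the 32-bit load of the lock
  // word above clears the upper half of temp.X(), so this Add does not
  // modify the value of `obj`; it only creates the address dependency.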
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK_EQ(needs_null_check, 0U);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
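        // AArch64 addressing modes cannot combine an immediate offset with a
        // scaled register offset, so materialize obj + offset in a scratch
        // register first and index off that.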
        Register temp2 = temps.AcquireW();
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

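  // If heap poisoning is enabled, the reference loaded above is in poisoned
  // form; unpoison it before the gray check and the mark slow path below
  // (this is a no-op when poisoning is disabled).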
  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

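  // Unlike the Baker read barrier fast path above, this barrier is
  // unconditional: we always branch to the slow path, which performs the
  // runtime call and then jumps back to the exit label.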
  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
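    // Virtual method: the embedded vtable is laid out directly inside the
    // class object, so a single load from the class in `InAt(0)` yields the
    // method entry.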
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
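    // Interface method: first load the ImTable pointer from the class, then
    // load the method entry at `method_offset` within that table.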
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}



#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art