/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data when num_entries is small.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
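// Illustrative cost comparison at the threshold (rough figures implied by the estimate above,
// not measured): for num_entries == 7, the compare/jump sequence costs about
// 1.5 * 7 + 3 ~= 14 instructions, while the jump table costs 7 instructions plus 7 * 4 bytes
// of literals -- also about 14 words -- so the table only starts paying off above this
// threshold.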

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
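// Reading aid (not part of the original source): on AArch64, FCMP with a NaN operand sets
// NZCV to 0011, so "lt" (N != V) is taken for an unordered result while "cc" (C == 0) is not;
// likewise "hi" and "cs" are taken for unordered while "gt" and "ge" are not. gt_bias selects
// between these pairs so that a NaN comparison falls on the desired side of the branch.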

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate the memory-accessing operand for saving/restoring live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the spill base address of the floating-point registers).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
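// Worked example (assumed numbers, for illustration only): STP/LDP of X registers encodes a
// signed 7-bit immediate scaled by the 8-byte access size, i.e. offsets in [-512, 504]. With,
// say, spill_offset == 496 and two core plus two FP registers live, the last pair would sit at
// 496 + 16 + 16 - 16 = 512, which does not encode; the code above then rebases onto a scratch
// register pointing at the FP spill base and uses the small offset -core_spill_size instead.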
184
185void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
186 RegisterSet* register_set = locations->GetLiveRegisters();
187 size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
188 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
189 if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
190 // If the register holds an object, update the stack mask.
191 if (locations->RegisterContainsObject(i)) {
192 locations->SetStackBit(stack_offset / kVRegSize);
193 }
194 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
195 DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
196 saved_core_stack_offsets_[i] = stack_offset;
197 stack_offset += kXRegSizeInBytes;
198 }
199 }
200
201 for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
202 if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
203 register_set->ContainsFloatingPointRegister(i)) {
204 DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
205 DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
206 saved_fpu_stack_offsets_[i] = stack_offset;
207 stack_offset += kDRegSizeInBytes;
208 }
209 }
210
211 SaveRestoreLiveRegistersHelper(codegen, register_set,
212 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
213}
214
215void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
216 RegisterSet* register_set = locations->GetLiveRegisters();
217 SaveRestoreLiveRegistersHelper(codegen, register_set,
218 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
219}
220
Alexandre Rames5319def2014-10-23 10:03:10 +0100221class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
222 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000223 explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}
Alexandre Rames5319def2014-10-23 10:03:10 +0100224
Alexandre Rames67555f72014-11-18 10:55:16 +0000225 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100226 LocationSummary* locations = instruction_->GetLocations();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000227 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100228
Alexandre Rames5319def2014-10-23 10:03:10 +0100229 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000230 if (instruction_->CanThrowIntoCatchBlock()) {
231 // Live registers will be restored in the catch block if caught.
232 SaveLiveRegisters(codegen, instruction_->GetLocations());
233 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000234 // We're moving two locations to locations that could overlap, so we need a parallel
235 // move resolver.
236 InvokeRuntimeCallingConvention calling_convention;
237 codegen->EmitParallelMoves(
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100238 locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
239 locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100240 uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
241 ? QUICK_ENTRY_POINT(pThrowStringBounds)
242 : QUICK_ENTRY_POINT(pThrowArrayBounds);
243 arm64_codegen->InvokeRuntime(entry_point_offset, instruction_, instruction_->GetDexPc(), this);
244 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800245 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100246 }
247
Alexandre Rames8158f282015-08-07 10:26:17 +0100248 bool IsFatal() const OVERRIDE { return true; }
249
Alexandre Rames9931f312015-06-19 14:47:01 +0100250 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }
251
Alexandre Rames5319def2014-10-23 10:03:10 +0100252 private:
Alexandre Rames5319def2014-10-23 10:03:10 +0100253 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
254};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // space in the underlying code buffer and that the jump table is generated with the right size.
  CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
                             CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->location() - table_start_.location();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
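// Sketch of how the emitted table is consumed (illustrative only; the actual dispatch sequence
// is emitted by the packed-switch visitor elsewhere in this file). Each slot holds a 32-bit
// offset from table_start_ to its target block, so dispatch is roughly:
//   adr   x_base, table_start_             // Materialize the table address.
//   ldrsw x_off, [x_base, w_idx, sxtw #2]  // Load the sign-extended 32-bit entry.
//   add   x_base, x_base, x_off            // Rebase the offset onto the table address.
//   br    x_base                           // Jump to the case body.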

// Slow path marking an object during a read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location out, Location obj)
      : SlowPathCodeARM64(instruction), out_(out), obj_(obj) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)), obj_, type);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierMark),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierMark, mirror::Object*, mirror::Object*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

 private:
  const Location out_;
  const Location obj_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(!instruction_->IsInvoke() ||
           (instruction_->IsInvokeStaticOrDirect() &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and intrinsic UnsafeGetObject.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        DCHECK(instruction_->IsInvoke());
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
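// Example walk-through (assumed signature, for illustration): for arguments
// (int, float, long, double), successive calls yield the first free GP register for the int,
// the first free FPU register for the float, then the next GP and FPU registers for the long
// and double, while stack_index_ advances by 1, 1, 2, and 2 vregs respectively -- so stack
// space is reserved for every argument even when it is actually passed in a register.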

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5 VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to
  // resolve the dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}
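// Illustrative prologue for a frame_size of 64 with core spills {x20, lr} and no FP spills
// (assumed numbers, not from an actual compile):
//   str x0, [sp, #-64]!     // Store the ArtMethod* and allocate the whole frame at once.
//   stp x20, lr, [sp, #48]  // Preserved core registers live at the top of the frame.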

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
                          core_spill_mask_);
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
                          fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

Nicolas Geoffray07276db2015-05-18 14:22:09 +01001083void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001084 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001085 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001086 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +01001087 vixl::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001088 if (value_can_be_null) {
1089 __ Cbz(value, &done);
1090 }
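  // `card` holds the biased card table base (loaded from a thread-local slot)
  // and `temp` the card index (`object` >> kCardShift). Storing the low byte
  // of `card` at that index marks the card dirty: the runtime biases the card
  // table base so its least significant byte equals the dirty-card value
  // (see gc::accounting::CardTable).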
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //   lr       : Runtime reserved.
  //   tr       : Runtime reserved.
  //   xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //   ip1      : VIXL core temp.
  //   ip0      : VIXL core temp.
  //
  // Blocked fp registers:
  //   d31      : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Otherwise the source is a register, and since
        // the type has not been specified, we choose a 64bit type to force
        // a 64bit move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimLong
            : Primitive::kPrimInt;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimDouble
            : Primitive::kPrimFloat;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
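      // Ldarb zero-extends, so the signed narrow types (kPrimByte here,
      // kPrimShort below) need an explicit sign extension of the loaded bits.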
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

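      // Load-acquire (Ldar) only targets core registers, so acquire into a
      // core temp first and then move the bits to the FP register.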
      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

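      // Store-release (Stlr) only accepts core registers, so move the FP
      // value through a core temp first.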
      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
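  // The load-acquire of the status below pairs with the release store
  // performed when the class's status is updated, so static fields written
  // during initialization are visible once the status reads as initialized.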
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_offset);
  __ Ldar(temp, HeapOperand(temp));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

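  // Test the 16-bit flags half of the thread's state-and-flags word: any set
  // flag (e.g. a pending suspend or checkpoint request) diverts to the slow
  // path, which calls into the runtime.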
  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object FieldGet with Baker's read barrier case.
    MacroAssembler* masm = GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
    Register temp = temps.AcquireW();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (field_type == Primitive::kPrimNot) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

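    // Volatile fields are written with a store-release (Stlr), which orders
    // all prior accesses before the store; non-volatile fields use a plain Str.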
    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in the same size register as the
          // result. If we are rotating a long and the shift distance comes in
          // a W register, we do not need to sxtw it for use as an X register:
          // shift distances are always masked with (reg_bits - 1), so the high
          // bits are ignored anyway.
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
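      // Java uses only the low 5 (int) or 6 (long) bits of the shift distance.
      // AArch64 variable-shift instructions already reduce the distance modulo
      // the register size, so only immediate distances need explicit masking.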
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.immediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type
  // conversion into the shifter operand, the IR generating `right_reg` (the
  // input to the type conversion) can have a different type from the current
  // instruction's type, so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if such an operand were passed,
  // by generating the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in
  // `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
    HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
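  // On affected cores, a 64-bit multiply-accumulate that directly follows a
  // load or store may produce an incorrect result; a single nop between the
  // two instructions breaks up the problematic sequence.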
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      vixl::CodeBufferCheckScope scope(masm,
                                       vixl::kInstructionSize,
                                       vixl::CodeBufferCheckScope::kCheck,
                                       vixl::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
  BlockPoolsScope block_pools(masm);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
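    // A constant index is folded into the immediate offset; a variable index
    // needs the array base plus data offset materialized in a temp (unless an
    // HArm64IntermediateAddress input already computed it), addressed with a
    // scaled register offset.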
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
      source = HeapOperand(obj, offset);
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsArm64IntermediateAddress()) {
        // The read barrier instrumentation does not support the
        // HArm64IntermediateAddress instruction yet.
        DCHECK(!kEmitCompilerReadBarrier);
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
    }

    codegen_->Load(type, OutputCPURegister(instruction), source);
    codegen_->MaybeRecordImplicitNullCheck(instruction);

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool object_array_set_with_read_barrier =
      kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002178 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00002179 // The read barrier instrumentation does not support the
2180 // HArm64IntermediateAddress instruction yet.
2181 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002182 // We do not need to compute the intermediate address from the array: the
2183 // input instruction has done it already. See the comment in
2184 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2185 if (kIsDebugBuild) {
2186 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2188 }
2189 temp = array;
2190 } else {
2191 __ Add(temp, array, offset);
2192 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002193 destination = HeapOperand(temp,
2194 XRegisterFrom(index),
2195 LSL,
2196 Primitive::ComponentSizeShift(value_type));
2197 }
2198 codegen_->Store(value_type, value, destination);
2199 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002200 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002201 DCHECK(needs_write_barrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002202 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002203 vixl::Label done;
2204 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002205 {
2206 // We use a block to end the scratch scope before the write barrier, thus
2207 // freeing the temporary registers so they can be used in `MarkGCCard`.
2208 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002209 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002210 if (index.IsConstant()) {
2211 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002212 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002213 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002214 destination = HeapOperand(temp,
2215 XRegisterFrom(index),
2216 LSL,
2217 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002218 }
2219
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002220 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2221 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2222 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2223
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002224 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002225 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2226 codegen_->AddSlowPath(slow_path);
2227 if (instruction->GetValueCanBeNull()) {
2228 vixl::Label non_zero;
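        // A null reference always passes the type check, so store it (via wzr)
        // directly and skip the check below.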
2229 __ Cbnz(Register(value), &non_zero);
2230 if (!index.IsConstant()) {
2231 __ Add(temp, array, offset);
2232 }
2233 __ Str(wzr, destination);
2234 codegen_->MaybeRecordImplicitNullCheck(instruction);
2235 __ B(&done);
2236 __ Bind(&non_zero);
2237 }
2238
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002239 if (kEmitCompilerReadBarrier) {
2240 // When read barriers are enabled, the type checking
2241 // instrumentation requires two read barriers:
2242 //
2243 // __ Mov(temp2, temp);
2244 // // /* HeapReference<Class> */ temp = temp->component_type_
2245 // __ Ldr(temp, HeapOperand(temp, component_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002246 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002247 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
2248 //
2249 // // /* HeapReference<Class> */ temp2 = value->klass_
2250 // __ Ldr(temp2, HeapOperand(Register(value), class_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002251 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002252 // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
2253 //
2254 // __ Cmp(temp, temp2);
2255 //
2256 // However, the second read barrier may trash `temp`, as it
2257 // is a temporary register, and as such would not be saved
2258 // along with live registers before calling the runtime (nor
2259 // restored afterwards). So in this case, we bail out and
2260 // delegate the work to the array set slow path.
2261 //
2262 // TODO: Extend the register allocator to support a new
2263 // "(locally) live temp" location so as to avoid always
2264 // going into the slow path when read barriers are enabled.
2265 __ B(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002266 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002267 Register temp2 = temps.AcquireSameSizeAs(array);
2268 // /* HeapReference<Class> */ temp = array->klass_
2269 __ Ldr(temp, HeapOperand(array, class_offset));
2270 codegen_->MaybeRecordImplicitNullCheck(instruction);
2271 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2272
2273 // /* HeapReference<Class> */ temp = temp->component_type_
2274 __ Ldr(temp, HeapOperand(temp, component_offset));
2275 // /* HeapReference<Class> */ temp2 = value->klass_
2276 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2277 // If heap poisoning is enabled, no need to unpoison `temp`
2278 // nor `temp2`, as we are comparing two poisoned references.
2279 __ Cmp(temp, temp2);
2280
2281 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2282 vixl::Label do_put;
2283 __ B(eq, &do_put);
2284 // If heap poisoning is enabled, the `temp` reference has
2285 // not been unpoisoned yet; unpoison it now.
2286 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2287
2288 // /* HeapReference<Class> */ temp = temp->super_class_
2289 __ Ldr(temp, HeapOperand(temp, super_offset));
2290 // If heap poisoning is enabled, no need to unpoison
2291 // `temp`, as we are comparing against null below.
2292 __ Cbnz(temp, slow_path->GetEntryLabel());
2293 __ Bind(&do_put);
2294 } else {
2295 __ B(ne, slow_path->GetEntryLabel());
2296 }
2297 temps.Release(temp2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002298 }
2299 }
2300
2301 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002302 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002303 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002304 __ Mov(temp2, value.W());
2305 GetAssembler()->PoisonHeapReference(temp2);
2306 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002307 }
2308
2309 if (!index.IsConstant()) {
2310 __ Add(temp, array, offset);
2311 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002312 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002313
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002314 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002315 codegen_->MaybeRecordImplicitNullCheck(instruction);
2316 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002317 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002318
2319 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2320
2321 if (done.IsLinked()) {
2322 __ Bind(&done);
2323 }
2324
2325 if (slow_path != nullptr) {
2326 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002327 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002328 }
2329}
2330
Alexandre Rames67555f72014-11-18 10:55:16 +00002331void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002332 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2333 ? LocationSummary::kCallOnSlowPath
2334 : LocationSummary::kNoCall;
2335 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002336 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002337 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002338 if (instruction->HasUses()) {
2339 locations->SetOut(Location::SameAsFirstInput());
2340 }
2341}
2342
2343void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002344 BoundsCheckSlowPathARM64* slow_path =
2345 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002346 codegen_->AddSlowPath(slow_path);
2347
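  // The single unsigned comparison also catches negative indices: reinterpreted
  // as unsigned, a negative index is greater than any valid array length, so
  // the `hs` (unsigned >=) branch below takes the slow path for it as well.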
2348 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2349 __ B(slow_path->GetEntryLabel(), hs);
2350}
2351
Alexandre Rames67555f72014-11-18 10:55:16 +00002352void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2353 LocationSummary* locations =
2354 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2355 locations->SetInAt(0, Location::RequiresRegister());
2356 if (check->HasUses()) {
2357 locations->SetOut(Location::SameAsFirstInput());
2358 }
2359}
2360
2361void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2362 // We assume the class is not null.
2363 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2364 check->GetLoadClass(), check, check->GetDexPc(), true);
2365 codegen_->AddSlowPath(slow_path);
2366 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2367}
2368
Roland Levillain1a653882016-03-18 18:05:57 +00002369static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2370 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2371 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2372}
2373
2374void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2375 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2376 Location rhs_loc = instruction->GetLocations()->InAt(1);
2377 if (rhs_loc.IsConstant()) {
2378 // 0.0 is the only immediate that can be encoded directly in
2379 // an FCMP instruction.
2380 //
2381 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2382 // specify that in a floating-point comparison, positive zero
2383 // and negative zero are considered equal, so we can use the
2384 // literal 0.0 for both cases here.
2385 //
      // Note however that some methods (Float.equals, Float.compare,
      // Float.compareTo, Double.equals, Double.compare,
2388 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2389 // StrictMath.min) consider 0.0 to be (strictly) greater than
2390 // -0.0. So if we ever translate calls to these methods into a
2391 // HCompare instruction, we must handle the -0.0 case with
2392 // care here.
2393 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2394 __ Fcmp(lhs_reg, 0.0);
2395 } else {
2396 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2397 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002398}
2399
Serban Constantinescu02164b32014-11-13 14:05:07 +00002400void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002401 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002402 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2403 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002404 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002405 case Primitive::kPrimBoolean:
2406 case Primitive::kPrimByte:
2407 case Primitive::kPrimShort:
2408 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002409 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002410 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002411 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002412 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002413 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2414 break;
2415 }
2416 case Primitive::kPrimFloat:
2417 case Primitive::kPrimDouble: {
2418 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002419 locations->SetInAt(1,
2420 IsFloatingPointZeroConstant(compare->InputAt(1))
2421 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2422 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002423 locations->SetOut(Location::RequiresRegister());
2424 break;
2425 }
2426 default:
2427 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2428 }
2429}
2430
2431void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2432 Primitive::Type in_type = compare->InputAt(0)->GetType();
2433
2434 // 0 if: left == right
2435 // 1 if: left > right
2436 // -1 if: left < right
2437 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002438 case Primitive::kPrimBoolean:
2439 case Primitive::kPrimByte:
2440 case Primitive::kPrimShort:
2441 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002442 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002443 case Primitive::kPrimLong: {
2444 Register result = OutputRegister(compare);
2445 Register left = InputRegisterAt(compare, 0);
2446 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002447 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002448 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2449 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
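      // For example, left = 3, right = 7: Cset sets result to 1 (the values
      // differ) and Cneg negates it to -1 because the signed comparison was
      // "less than". Equal inputs leave result at 0.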
Serban Constantinescu02164b32014-11-13 14:05:07 +00002450 break;
2451 }
2452 case Primitive::kPrimFloat:
2453 case Primitive::kPrimDouble: {
2454 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002455 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002456 __ Cset(result, ne);
2457 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
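      // A NaN input leaves the flags in the unordered state, so `ne` yields 1;
      // the bias-aware condition then either keeps +1 (gt bias) or negates to
      // -1 (lt bias), matching the Java compare semantics.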
Alexandre Rames5319def2014-10-23 10:03:10 +01002458 break;
2459 }
2460 default:
2461 LOG(FATAL) << "Unimplemented compare type " << in_type;
2462 }
2463}
2464
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002465void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002466 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002467
2468 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2469 locations->SetInAt(0, Location::RequiresFpuRegister());
2470 locations->SetInAt(1,
2471 IsFloatingPointZeroConstant(instruction->InputAt(1))
2472 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2473 : Location::RequiresFpuRegister());
2474 } else {
2475 // Integer cases.
2476 locations->SetInAt(0, Location::RequiresRegister());
2477 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2478 }
2479
David Brazdilb3e773e2016-01-26 11:28:37 +00002480 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002481 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002482 }
2483}
2484
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002485void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002486 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002487 return;
2488 }
2489
2490 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002491 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002492 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002493
Roland Levillain7f63c522015-07-13 15:54:55 +00002494 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002495 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002496 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002497 } else {
2498 // Integer cases.
2499 Register lhs = InputRegisterAt(instruction, 0);
2500 Operand rhs = InputOperandAt(instruction, 1);
2501 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002502 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002503 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002504}
2505
2506#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2507 M(Equal) \
2508 M(NotEqual) \
2509 M(LessThan) \
2510 M(LessThanOrEqual) \
2511 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002512 M(GreaterThanOrEqual) \
2513 M(Below) \
2514 M(BelowOrEqual) \
2515 M(Above) \
2516 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002517#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002518void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2519void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002520FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002521#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002522#undef FOR_EACH_CONDITION_INSTRUCTION
2523
Zheng Xuc6667102015-05-15 16:08:45 +08002524void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2525 DCHECK(instruction->IsDiv() || instruction->IsRem());
2526
2527 LocationSummary* locations = instruction->GetLocations();
2528 Location second = locations->InAt(1);
2529 DCHECK(second.IsConstant());
2530
2531 Register out = OutputRegister(instruction);
2532 Register dividend = InputRegisterAt(instruction, 0);
2533 int64_t imm = Int64FromConstant(second.GetConstant());
2534 DCHECK(imm == 1 || imm == -1);
2535
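  // x % 1 and x % -1 are always 0, x / 1 is x, and x / -1 is -x, so no division
  // instruction is needed here.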
2536 if (instruction->IsRem()) {
2537 __ Mov(out, 0);
2538 } else {
2539 if (imm == 1) {
2540 __ Mov(out, dividend);
2541 } else {
2542 __ Neg(out, dividend);
2543 }
2544 }
2545}
2546
2547void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2548 DCHECK(instruction->IsDiv() || instruction->IsRem());
2549
2550 LocationSummary* locations = instruction->GetLocations();
2551 Location second = locations->InAt(1);
2552 DCHECK(second.IsConstant());
2553
2554 Register out = OutputRegister(instruction);
2555 Register dividend = InputRegisterAt(instruction, 0);
2556 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002557 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002558 int ctz_imm = CTZ(abs_imm);
2559
2560 UseScratchRegisterScope temps(GetVIXLAssembler());
2561 Register temp = temps.AcquireSameSizeAs(out);
2562
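  // Division must round toward zero. Biasing a negative dividend by
  // (abs_imm - 1) before the arithmetic shift achieves that; for example, with
  // imm = 8 (ctz_imm = 3) and dividend = -13: temp = -13 + 7 = -6, out = -6,
  // and -6 >> 3 = -1, which is -13 / 8 rounded toward zero. The remainder path
  // applies the same bias (for negative dividends only), masks with
  // (abs_imm - 1), then removes the bias: (-13 + 7) & 7 = 2, and 2 - 7 = -5 = -13 % 8.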
2563 if (instruction->IsDiv()) {
2564 __ Add(temp, dividend, abs_imm - 1);
2565 __ Cmp(dividend, 0);
2566 __ Csel(out, temp, dividend, lt);
2567 if (imm > 0) {
2568 __ Asr(out, out, ctz_imm);
2569 } else {
2570 __ Neg(out, Operand(out, ASR, ctz_imm));
2571 }
2572 } else {
2573 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2574 __ Asr(temp, dividend, bits - 1);
2575 __ Lsr(temp, temp, bits - ctz_imm);
2576 __ Add(out, dividend, temp);
2577 __ And(out, out, abs_imm - 1);
2578 __ Sub(out, out, temp);
2579 }
2580}
2581
2582void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2583 DCHECK(instruction->IsDiv() || instruction->IsRem());
2584
2585 LocationSummary* locations = instruction->GetLocations();
2586 Location second = locations->InAt(1);
2587 DCHECK(second.IsConstant());
2588
2589 Register out = OutputRegister(instruction);
2590 Register dividend = InputRegisterAt(instruction, 0);
2591 int64_t imm = Int64FromConstant(second.GetConstant());
2592
2593 Primitive::Type type = instruction->GetResultType();
2594 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2595
2596 int64_t magic;
2597 int shift;
2598 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
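  // This is the classic multiply-by-magic-constant technique (cf. Hacker's
  // Delight): the quotient is recovered from the high half of dividend * magic,
  // shifted right by `shift` and corrected below.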
2599
2600 UseScratchRegisterScope temps(GetVIXLAssembler());
2601 Register temp = temps.AcquireSameSizeAs(out);
2602
2603 // temp = get_high(dividend * magic)
2604 __ Mov(temp, magic);
2605 if (type == Primitive::kPrimLong) {
2606 __ Smulh(temp, dividend, temp);
2607 } else {
2608 __ Smull(temp.X(), dividend, temp);
2609 __ Lsr(temp.X(), temp.X(), 32);
2610 }
2611
2612 if (imm > 0 && magic < 0) {
2613 __ Add(temp, temp, dividend);
2614 } else if (imm < 0 && magic > 0) {
2615 __ Sub(temp, temp, dividend);
2616 }
2617
2618 if (shift != 0) {
2619 __ Asr(temp, temp, shift);
2620 }
2621
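  // The arithmetic shift rounds toward negative infinity; subtracting the sign
  // of `temp` (-1 when negative, 0 otherwise) adds 1 to negative quotients and
  // thus rounds the result toward zero.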
2622 if (instruction->IsDiv()) {
2623 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2624 } else {
2625 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2626 // TODO: Strength reduction for msub.
2627 Register temp_imm = temps.AcquireSameSizeAs(out);
2628 __ Mov(temp_imm, imm);
2629 __ Msub(out, temp, temp_imm, dividend);
2630 }
2631}
2632
2633void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2634 DCHECK(instruction->IsDiv() || instruction->IsRem());
2635 Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2637
2638 LocationSummary* locations = instruction->GetLocations();
2639 Register out = OutputRegister(instruction);
2640 Location second = locations->InAt(1);
2641
2642 if (second.IsConstant()) {
2643 int64_t imm = Int64FromConstant(second.GetConstant());
2644
2645 if (imm == 0) {
      // Do not generate anything: the DivZeroCheck prevents this code from being executed.
2647 } else if (imm == 1 || imm == -1) {
2648 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002649 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002650 DivRemByPowerOfTwo(instruction);
2651 } else {
2652 DCHECK(imm <= -2 || imm >= 2);
2653 GenerateDivRemWithAnyConstant(instruction);
2654 }
2655 } else {
2656 Register dividend = InputRegisterAt(instruction, 0);
2657 Register divisor = InputRegisterAt(instruction, 1);
2658 if (instruction->IsDiv()) {
2659 __ Sdiv(out, dividend, divisor);
2660 } else {
2661 UseScratchRegisterScope temps(GetVIXLAssembler());
2662 Register temp = temps.AcquireSameSizeAs(out);
2663 __ Sdiv(temp, dividend, divisor);
2664 __ Msub(out, temp, divisor, dividend);
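      // Msub computes out = dividend - temp * divisor, i.e. the remainder of
      // the signed division above.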
2665 }
2666 }
2667}
2668
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002669void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2670 LocationSummary* locations =
2671 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2672 switch (div->GetResultType()) {
2673 case Primitive::kPrimInt:
2674 case Primitive::kPrimLong:
2675 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002676 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002677 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2678 break;
2679
2680 case Primitive::kPrimFloat:
2681 case Primitive::kPrimDouble:
2682 locations->SetInAt(0, Location::RequiresFpuRegister());
2683 locations->SetInAt(1, Location::RequiresFpuRegister());
2684 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2685 break;
2686
2687 default:
2688 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2689 }
2690}
2691
2692void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2693 Primitive::Type type = div->GetResultType();
2694 switch (type) {
2695 case Primitive::kPrimInt:
2696 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002697 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002698 break;
2699
2700 case Primitive::kPrimFloat:
2701 case Primitive::kPrimDouble:
2702 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2703 break;
2704
2705 default:
2706 LOG(FATAL) << "Unexpected div type " << type;
2707 }
2708}
2709
Alexandre Rames67555f72014-11-18 10:55:16 +00002710void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002711 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2712 ? LocationSummary::kCallOnSlowPath
2713 : LocationSummary::kNoCall;
2714 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002715 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2716 if (instruction->HasUses()) {
2717 locations->SetOut(Location::SameAsFirstInput());
2718 }
2719}
2720
2721void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2722 SlowPathCodeARM64* slow_path =
2723 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2724 codegen_->AddSlowPath(slow_path);
2725 Location value = instruction->GetLocations()->InAt(0);
2726
Alexandre Rames3e69f162014-12-10 10:36:50 +00002727 Primitive::Type type = instruction->GetType();
2728
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002729 if (!Primitive::IsIntegralType(type)) {
2730 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002731 return;
2732 }
2733
Alexandre Rames67555f72014-11-18 10:55:16 +00002734 if (value.IsConstant()) {
2735 int64_t divisor = Int64ConstantFrom(value);
2736 if (divisor == 0) {
2737 __ B(slow_path->GetEntryLabel());
2738 } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002741 }
2742 } else {
2743 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2744 }
2745}
2746
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002747void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2748 LocationSummary* locations =
2749 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2750 locations->SetOut(Location::ConstantLocation(constant));
2751}
2752
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002753void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2754 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002755 // Will be generated at use site.
2756}
2757
Alexandre Rames5319def2014-10-23 10:03:10 +01002758void LocationsBuilderARM64::VisitExit(HExit* exit) {
2759 exit->SetLocations(nullptr);
2760}
2761
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002762void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002763}
2764
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002765void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2766 LocationSummary* locations =
2767 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2768 locations->SetOut(Location::ConstantLocation(constant));
2769}
2770
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002771void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002772 // Will be generated at use site.
2773}
2774
David Brazdilfc6a86a2015-06-26 10:33:45 +00002775void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002776 DCHECK(!successor->IsExitBlock());
2777 HBasicBlock* block = got->GetBlock();
2778 HInstruction* previous = got->GetPrevious();
2779 HLoopInformation* info = block->GetLoopInformation();
2780
David Brazdil46e2a392015-03-16 17:31:52 +00002781 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002782 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2783 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2784 return;
2785 }
2786 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2787 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2788 }
2789 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002790 __ B(codegen_->GetLabelOf(successor));
2791 }
2792}
2793
David Brazdilfc6a86a2015-06-26 10:33:45 +00002794void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2795 got->SetLocations(nullptr);
2796}
2797
2798void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2799 HandleGoto(got, got->GetSuccessor());
2800}
2801
2802void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2803 try_boundary->SetLocations(nullptr);
2804}
2805
2806void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2807 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2808 if (!successor->IsExitBlock()) {
2809 HandleGoto(try_boundary, successor);
2810 }
2811}
2812
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002813void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002814 size_t condition_input_index,
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002815 vixl::Label* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00002816 vixl::Label* false_target) {
2817 // FP branching requires both targets to be explicit. If either of the targets
2818 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
2819 vixl::Label fallthrough_target;
2820 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002821
David Brazdil0debae72015-11-12 18:37:00 +00002822 if (true_target == nullptr && false_target == nullptr) {
2823 // Nothing to do. The code always falls through.
2824 return;
2825 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002826 // Constant condition, statically compared against "true" (integer value 1).
2827 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002828 if (true_target != nullptr) {
2829 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002830 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002831 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002832 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002833 if (false_target != nullptr) {
2834 __ B(false_target);
2835 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002836 }
David Brazdil0debae72015-11-12 18:37:00 +00002837 return;
2838 }
2839
2840 // The following code generates these patterns:
2841 // (1) true_target == nullptr && false_target != nullptr
2842 // - opposite condition true => branch to false_target
2843 // (2) true_target != nullptr && false_target == nullptr
2844 // - condition true => branch to true_target
2845 // (3) true_target != nullptr && false_target != nullptr
2846 // - condition true => branch to true_target
2847 // - branch to false_target
2848 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002849 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002850 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002851 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002852 if (true_target == nullptr) {
2853 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2854 } else {
2855 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2856 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002857 } else {
2858 // The condition instruction has not been materialized, use its inputs as
2859 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002860 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002861
David Brazdil0debae72015-11-12 18:37:00 +00002862 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002863 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002864 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002865 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002866 IfCondition opposite_condition = condition->GetOppositeCondition();
2867 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002868 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002869 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002870 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002871 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002872 // Integer cases.
2873 Register lhs = InputRegisterAt(condition, 0);
2874 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002875
2876 Condition arm64_cond;
2877 vixl::Label* non_fallthrough_target;
2878 if (true_target == nullptr) {
2879 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2880 non_fallthrough_target = false_target;
2881 } else {
2882 arm64_cond = ARM64Condition(condition->GetCondition());
2883 non_fallthrough_target = true_target;
2884 }
2885
Aart Bik086d27e2016-01-20 17:02:00 -08002886 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
2887 rhs.IsImmediate() && (rhs.immediate() == 0)) {
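        // Comparisons against zero can be emitted as a single compare-and-branch
        // (cbz/cbnz) or test-bit-and-branch (tbz/tbnz) instruction, avoiding a
        // separate Cmp.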
Roland Levillain7f63c522015-07-13 15:54:55 +00002888 switch (arm64_cond) {
2889 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002890 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002891 break;
2892 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002893 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002894 break;
2895 case lt:
2896 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002897 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002898 break;
2899 case ge:
2900 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002901 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002902 break;
2903 default:
2904 // Without the `static_cast` the compiler throws an error for
2905 // `-Werror=sign-promo`.
2906 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2907 }
2908 } else {
2909 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002910 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002911 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002912 }
2913 }
David Brazdil0debae72015-11-12 18:37:00 +00002914
2915 // If neither branch falls through (case 3), the conditional branch to `true_target`
2916 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2917 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002918 __ B(false_target);
2919 }
David Brazdil0debae72015-11-12 18:37:00 +00002920
2921 if (fallthrough_target.IsLinked()) {
2922 __ Bind(&fallthrough_target);
2923 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002924}
2925
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002926void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2927 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002928 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002929 locations->SetInAt(0, Location::RequiresRegister());
2930 }
2931}
2932
2933void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002934 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2935 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2936 vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2937 nullptr : codegen_->GetLabelOf(true_successor);
2938 vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2939 nullptr : codegen_->GetLabelOf(false_successor);
2940 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002941}
2942
2943void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2944 LocationSummary* locations = new (GetGraph()->GetArena())
2945 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00002946 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002947 locations->SetInAt(0, Location::RequiresRegister());
2948 }
2949}
2950
2951void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08002952 SlowPathCodeARM64* slow_path =
2953 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00002954 GenerateTestAndBranch(deoptimize,
2955 /* condition_input_index */ 0,
2956 slow_path->GetEntryLabel(),
2957 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002958}
2959
David Brazdilc0b601b2016-02-08 14:20:45 +00002960static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
2961 return condition->IsCondition() &&
2962 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
2963}
2964
Alexandre Rames880f1192016-06-13 16:04:50 +01002965static inline Condition GetConditionForSelect(HCondition* condition) {
2966 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00002967 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
2968 : ARM64Condition(cond);
2969}
2970
David Brazdil74eb1b22015-12-14 11:44:01 +00002971void LocationsBuilderARM64::VisitSelect(HSelect* select) {
2972 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01002973 if (Primitive::IsFloatingPointType(select->GetType())) {
2974 locations->SetInAt(0, Location::RequiresFpuRegister());
2975 locations->SetInAt(1, Location::RequiresFpuRegister());
2976 locations->SetOut(Location::RequiresFpuRegister());
2977 } else {
2978 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
2979 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
2980 bool is_true_value_constant = cst_true_value != nullptr;
2981 bool is_false_value_constant = cst_false_value != nullptr;
2982 // Ask VIXL whether we should synthesize constants in registers.
2983 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
2984 Operand true_op = is_true_value_constant ?
2985 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
2986 Operand false_op = is_false_value_constant ?
2987 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
2988 bool true_value_in_register = false;
2989 bool false_value_in_register = false;
2990 MacroAssembler::GetCselSynthesisInformation(
2991 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
2992 true_value_in_register |= !is_true_value_constant;
2993 false_value_in_register |= !is_false_value_constant;
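    // Non-constant inputs always need a register; for constant inputs VIXL
    // reports whether the value can be synthesized directly by the csel family
    // (csel/csinc/csinv/csneg) or must first be materialized in a register.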
2994
2995 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
2996 : Location::ConstantLocation(cst_true_value));
2997 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
2998 : Location::ConstantLocation(cst_false_value));
2999 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003000 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003001
David Brazdil74eb1b22015-12-14 11:44:01 +00003002 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3003 locations->SetInAt(2, Location::RequiresRegister());
3004 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003005}
3006
3007void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003008 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003009 Condition csel_cond;
3010
3011 if (IsBooleanValueOrMaterializedCondition(cond)) {
3012 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003013 // Use the condition flags set by the previous instruction.
3014 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003015 } else {
3016 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003017 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003018 }
3019 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003020 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003021 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003022 } else {
3023 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003024 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003025 }
3026
Alexandre Rames880f1192016-06-13 16:04:50 +01003027 if (Primitive::IsFloatingPointType(select->GetType())) {
3028 __ Fcsel(OutputFPRegister(select),
3029 InputFPRegisterAt(select, 1),
3030 InputFPRegisterAt(select, 0),
3031 csel_cond);
3032 } else {
3033 __ Csel(OutputRegister(select),
3034 InputOperandAt(select, 1),
3035 InputOperandAt(select, 0),
3036 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003037 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003038}
3039
David Srbecky0cf44932015-12-09 14:09:59 +00003040void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3041 new (GetGraph()->GetArena()) LocationSummary(info);
3042}
3043
David Srbeckyd28f4a02016-03-14 17:14:24 +00003044void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3045 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003046}
3047
3048void CodeGeneratorARM64::GenerateNop() {
3049 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003050}
3051
Alexandre Rames5319def2014-10-23 10:03:10 +01003052void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003053 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003054}
3055
3056void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003057 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003058}
3059
3060void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003061 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003062}
3063
3064void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003065 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003066}
3067
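// Returns whether the type checks below need an extra temporary register under
// the read barrier configuration; when they do, it is passed as `maybe_temp` to
// the GenerateReferenceLoad* helpers.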
Roland Levillain44015862016-01-22 11:47:17 +00003068static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3069 return kEmitCompilerReadBarrier &&
3070 (kUseBakerReadBarrier ||
3071 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3072 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3073 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3074}
3075
Alexandre Rames67555f72014-11-18 10:55:16 +00003076void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003077 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003078 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3079 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003080 case TypeCheckKind::kExactCheck:
3081 case TypeCheckKind::kAbstractClassCheck:
3082 case TypeCheckKind::kClassHierarchyCheck:
3083 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003084 call_kind =
3085 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003086 break;
3087 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003088 case TypeCheckKind::kUnresolvedCheck:
3089 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003090 call_kind = LocationSummary::kCallOnSlowPath;
3091 break;
3092 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003093
Alexandre Rames67555f72014-11-18 10:55:16 +00003094 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003095 locations->SetInAt(0, Location::RequiresRegister());
3096 locations->SetInAt(1, Location::RequiresRegister());
3097 // The "out" register is used as a temporary, so it overlaps with the inputs.
3098 // Note that TypeCheckSlowPathARM64 uses this register too.
3099 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3100 // When read barriers are enabled, we need a temporary register for
3101 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003102 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003103 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003104 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003105}
3106
3107void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003108 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003109 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003110 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003111 Register obj = InputRegisterAt(instruction, 0);
3112 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003113 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003114 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003115 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3116 locations->GetTemp(0) :
3117 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003118 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3119 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3120 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3121 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003122
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003123 vixl::Label done, zero;
3124 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003125
3126 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003127 // Avoid null check if we know `obj` is not null.
3128 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003129 __ Cbz(obj, &zero);
3130 }
3131
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003132 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003133 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003134
Roland Levillain44015862016-01-22 11:47:17 +00003135 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003136 case TypeCheckKind::kExactCheck: {
3137 __ Cmp(out, cls);
3138 __ Cset(out, eq);
3139 if (zero.IsLinked()) {
3140 __ B(&done);
3141 }
3142 break;
3143 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003144
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003145 case TypeCheckKind::kAbstractClassCheck: {
3146 // If the class is abstract, we eagerly fetch the super class of the
3147 // object to avoid doing a comparison we know will fail.
3148 vixl::Label loop, success;
3149 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003150 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003151 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003152 // If `out` is null, we use it for the result, and jump to `done`.
3153 __ Cbz(out, &done);
3154 __ Cmp(out, cls);
3155 __ B(ne, &loop);
3156 __ Mov(out, 1);
3157 if (zero.IsLinked()) {
3158 __ B(&done);
3159 }
3160 break;
3161 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003162
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003163 case TypeCheckKind::kClassHierarchyCheck: {
3164 // Walk over the class hierarchy to find a match.
3165 vixl::Label loop, success;
3166 __ Bind(&loop);
3167 __ Cmp(out, cls);
3168 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003169 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003170 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003171 __ Cbnz(out, &loop);
3172 // If `out` is null, we use it for the result, and jump to `done`.
3173 __ B(&done);
3174 __ Bind(&success);
3175 __ Mov(out, 1);
3176 if (zero.IsLinked()) {
3177 __ B(&done);
3178 }
3179 break;
3180 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003181
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003182 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003183 // Do an exact check.
3184 vixl::Label exact_check;
3185 __ Cmp(out, cls);
3186 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003187 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003188 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003189 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003190 // If `out` is null, we use it for the result, and jump to `done`.
3191 __ Cbz(out, &done);
3192 __ Ldrh(out, HeapOperand(out, primitive_offset));
3193 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3194 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003195 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003196 __ Mov(out, 1);
3197 __ B(&done);
3198 break;
3199 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003200
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003201 case TypeCheckKind::kArrayCheck: {
3202 __ Cmp(out, cls);
3203 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003204 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3205 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003206 codegen_->AddSlowPath(slow_path);
3207 __ B(ne, slow_path->GetEntryLabel());
3208 __ Mov(out, 1);
3209 if (zero.IsLinked()) {
3210 __ B(&done);
3211 }
3212 break;
3213 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003214
Calin Juravle98893e12015-10-02 21:05:03 +01003215 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003216 case TypeCheckKind::kInterfaceCheck: {
3217 // Note that we indeed only call on slow path, but we always go
3218 // into the slow path for the unresolved and interface check
3219 // cases.
3220 //
3221 // We cannot directly call the InstanceofNonTrivial runtime
3222 // entry point without resorting to a type checking slow path
3223 // here (i.e. by calling InvokeRuntime directly), as it would
3224 // require to assign fixed registers for the inputs of this
3225 // HInstanceOf instruction (following the runtime calling
3226 // convention), which might be cluttered by the potential first
3227 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003228 //
3229 // TODO: Introduce a new runtime entry point taking the object
3230 // to test (instead of its class) as argument, and let it deal
3231 // with the read barrier issues. This will let us refactor this
3232 // case of the `switch` code as it was previously (with a direct
3233 // call to the runtime not using a type checking slow path).
3234 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003235 DCHECK(locations->OnlyCallsOnSlowPath());
3236 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3237 /* is_fatal */ false);
3238 codegen_->AddSlowPath(slow_path);
3239 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003240 if (zero.IsLinked()) {
3241 __ B(&done);
3242 }
3243 break;
3244 }
3245 }
3246
3247 if (zero.IsLinked()) {
3248 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003249 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003250 }
3251
3252 if (done.IsLinked()) {
3253 __ Bind(&done);
3254 }
3255
3256 if (slow_path != nullptr) {
3257 __ Bind(slow_path->GetExitLabel());
3258 }
3259}
3260
3261void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3262 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3263 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3264
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003265 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3266 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003267 case TypeCheckKind::kExactCheck:
3268 case TypeCheckKind::kAbstractClassCheck:
3269 case TypeCheckKind::kClassHierarchyCheck:
3270 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003271 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3272 LocationSummary::kCallOnSlowPath :
3273 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003274 break;
3275 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003276 case TypeCheckKind::kUnresolvedCheck:
3277 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003278 call_kind = LocationSummary::kCallOnSlowPath;
3279 break;
3280 }
3281
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003282 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3283 locations->SetInAt(0, Location::RequiresRegister());
3284 locations->SetInAt(1, Location::RequiresRegister());
3285 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3286 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003287 // When read barriers are enabled, we need an additional temporary
3288 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003289 if (TypeCheckNeedsATemporary(type_check_kind)) {
3290 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003291 }
3292}
3293
3294void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003295 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003296 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003297 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003298 Register obj = InputRegisterAt(instruction, 0);
3299 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003300 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003301 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3302 locations->GetTemp(1) :
3303 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003304 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003305 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3306 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3307 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3308 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003309
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003310 bool is_type_check_slow_path_fatal =
3311 (type_check_kind == TypeCheckKind::kExactCheck ||
3312 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3313 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3314 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3315 !instruction->CanThrowIntoCatchBlock();
3316 SlowPathCodeARM64* type_check_slow_path =
3317 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3318 is_type_check_slow_path_fatal);
3319 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003320
3321 vixl::Label done;
3322 // Avoid null check if we know obj is not null.
3323 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003324 __ Cbz(obj, &done);
3325 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003326
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003327 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003328 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003329
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003330 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003331 case TypeCheckKind::kExactCheck:
3332 case TypeCheckKind::kArrayCheck: {
3333 __ Cmp(temp, cls);
3334 // Jump to slow path for throwing the exception or doing a
3335 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003336 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003337 break;
3338 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003339
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003340 case TypeCheckKind::kAbstractClassCheck: {
3341 // If the class is abstract, we eagerly fetch the super class of the
3342 // object to avoid doing a comparison we know will fail.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003343 vixl::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003344 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003345 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003346 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003347
3348 // If the class reference currently in `temp` is not null, jump
3349 // to the `compare_classes` label to compare it with the checked
3350 // class.
3351 __ Cbnz(temp, &compare_classes);
3352 // Otherwise, jump to the slow path to throw the exception.
3353 //
3354 // But before, move back the object's class into `temp` before
3355 // going into the slow path, as it has been overwritten in the
3356 // meantime.
3357 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003358 GenerateReferenceLoadTwoRegisters(
3359 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003360 __ B(type_check_slow_path->GetEntryLabel());
3361
3362 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003363 __ Cmp(temp, cls);
3364 __ B(ne, &loop);
3365 break;
3366 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003367
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003368 case TypeCheckKind::kClassHierarchyCheck: {
3369 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003370 vixl::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003371 __ Bind(&loop);
3372 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003373 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003374
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003375 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003376 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003377
3378 // If the class reference currently in `temp` is not null, jump
 3379 // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003380 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003381 // Otherwise, jump to the slow path to throw the exception.
3382 //
3383 // But before, move back the object's class into `temp` before
3384 // going into the slow path, as it has been overwritten in the
3385 // meantime.
3386 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003387 GenerateReferenceLoadTwoRegisters(
3388 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003389 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003390 break;
3391 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003392
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003393 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003394 // Do an exact check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003395 vixl::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003396 __ Cmp(temp, cls);
3397 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003398
3399 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003400 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003401 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003402
3403 // If the component type is not null (i.e. the object is indeed
3404 // an array), jump to label `check_non_primitive_component_type`
3405 // to further check that this component type is not a primitive
3406 // type.
3407 __ Cbnz(temp, &check_non_primitive_component_type);
3408 // Otherwise, jump to the slow path to throw the exception.
3409 //
3410 // But before, move back the object's class into `temp` before
3411 // going into the slow path, as it has been overwritten in the
3412 // meantime.
3413 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003414 GenerateReferenceLoadTwoRegisters(
3415 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003416 __ B(type_check_slow_path->GetEntryLabel());
3417
3418 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003419 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3420 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003421 __ Cbz(temp, &done);
3422 // Same comment as above regarding `temp` and the slow path.
3423 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003424 GenerateReferenceLoadTwoRegisters(
3425 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003426 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003427 break;
3428 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003429
Calin Juravle98893e12015-10-02 21:05:03 +01003430 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003431 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003432 // We always go into the type check slow path for the unresolved
3433 // and interface check cases.
3434 //
3435 // We cannot directly call the CheckCast runtime entry point
3436 // without resorting to a type checking slow path here (i.e. by
 3437 // calling InvokeRuntime directly), as it would require us to
 3438 // assign fixed registers for the inputs of this HCheckCast
3439 // instruction (following the runtime calling convention), which
3440 // might be cluttered by the potential first read barrier
3441 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003442 //
3443 // TODO: Introduce a new runtime entry point taking the object
3444 // to test (instead of its class) as argument, and let it deal
3445 // with the read barrier issues. This will let us refactor this
3446 // case of the `switch` code as it was previously (with a direct
3447 // call to the runtime not using a type checking slow path).
3448 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003449 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003450 break;
3451 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003452 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003453
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003454 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003455}
3456
Alexandre Rames5319def2014-10-23 10:03:10 +01003457void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3458 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3459 locations->SetOut(Location::ConstantLocation(constant));
3460}
3461
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003462void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003463 // Will be generated at use site.
3464}
3465
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003466void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3467 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3468 locations->SetOut(Location::ConstantLocation(constant));
3469}
3470
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003471void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003472 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003473}
3474
Calin Juravle175dc732015-08-25 15:42:32 +01003475void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3476 // The trampoline uses the same calling convention as dex calling conventions,
3477 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3478 // the method_idx.
3479 HandleInvoke(invoke);
3480}
3481
3482void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3483 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3484}
3485
Alexandre Rames5319def2014-10-23 10:03:10 +01003486void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003487 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003488 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003489}
3490
Alexandre Rames67555f72014-11-18 10:55:16 +00003491void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3492 HandleInvoke(invoke);
3493}
3494
3495void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3496 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003497 LocationSummary* locations = invoke->GetLocations();
3498 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003499 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003500 Offset class_offset = mirror::Object::ClassOffset();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003501 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003502
3503 // The register ip1 is required to be used for the hidden argument in
3504 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003505 MacroAssembler* masm = GetVIXLAssembler();
3506 UseScratchRegisterScope scratch_scope(masm);
3507 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003508 scratch_scope.Exclude(ip1);
3509 __ Mov(ip1, invoke->GetDexMethodIndex());
3510
Alexandre Rames67555f72014-11-18 10:55:16 +00003511 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003512 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003513 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003514 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003515 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003516 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003517 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003518 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003519 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003520 // Instead of simply (possibly) unpoisoning `temp` here, we should
3521 // emit a read barrier for the previous class reference load.
3522 // However this is not required in practice, as this is an
3523 // intermediate/temporary reference and because the current
3524 // concurrent copying collector keeps the from-space memory
3525 // intact/accessible until the end of the marking phase (the
 3526 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003527 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Nelli Kimbadee982016-05-13 13:08:53 +03003528 __ Ldr(temp,
3529 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3530 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity50706432016-06-14 11:31:04 -07003531 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00003532 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003533 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003534 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003535 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003536 // lr();
3537 __ Blr(lr);
3538 DCHECK(!codegen_->IsLeafMethod());
3539 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3540}
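// Recap of the interface dispatch emitted above (an illustrative sketch; the
// instructions generated by the visitor are authoritative):
//   ip1  = invoke->GetDexMethodIndex()   // hidden argument for the IMT
//                                        // conflict trampoline
//   temp = receiver->klass_              // possibly unpoisoned heap reference
//   temp = temp->imt_                    // ImTable*
//   temp = temp[imt_index]               // ArtMethod* from the IMT slot
//   lr   = temp->entry_point_from_quick_compiled_code_
//   blr lr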
3541
3542void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003543 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3544 if (intrinsic.TryDispatch(invoke)) {
3545 return;
3546 }
3547
Alexandre Rames67555f72014-11-18 10:55:16 +00003548 HandleInvoke(invoke);
3549}
3550
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003551void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003552 // Explicit clinit checks triggered by static invokes must have been pruned by
3553 // art::PrepareForRegisterAllocation.
3554 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003555
Andreas Gampe878d58c2015-01-15 23:24:00 -08003556 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3557 if (intrinsic.TryDispatch(invoke)) {
3558 return;
3559 }
3560
Alexandre Rames67555f72014-11-18 10:55:16 +00003561 HandleInvoke(invoke);
3562}
3563
Andreas Gampe878d58c2015-01-15 23:24:00 -08003564static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3565 if (invoke->GetLocations()->Intrinsified()) {
3566 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3567 intrinsic.Dispatch(invoke);
3568 return true;
3569 }
3570 return false;
3571}
3572
Vladimir Markodc151b22015-10-15 18:02:30 +01003573HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3574 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3575 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003576 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003577 return desired_dispatch_info;
3578}
3579
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003580void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003581 // For better instruction scheduling we load the direct code pointer before the method pointer.
3582 bool direct_code_loaded = false;
3583 switch (invoke->GetCodePtrLocation()) {
3584 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3585 // LR = code address from literal pool with link-time patch.
3586 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3587 direct_code_loaded = true;
3588 break;
3589 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3590 // LR = invoke->GetDirectCodePtr();
3591 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3592 direct_code_loaded = true;
3593 break;
3594 default:
3595 break;
3596 }
3597
Andreas Gampe878d58c2015-01-15 23:24:00 -08003598 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003599 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3600 switch (invoke->GetMethodLoadKind()) {
3601 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3602 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003603 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003604 break;
3605 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003606 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003607 break;
3608 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3609 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003610 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003611 break;
3612 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3613 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003614 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003615 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3616 break;
3617 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3618 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003619 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3620 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
3621 vixl::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003622 {
3623 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003624 __ Bind(adrp_label);
3625 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003626 }
Vladimir Marko58155012015-08-19 12:49:41 +00003627 // Add LDR with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003628 vixl::Label* ldr_label =
3629 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003630 {
3631 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003632 __ Bind(ldr_label);
3633 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003634 }
Vladimir Marko58155012015-08-19 12:49:41 +00003635 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003636 }
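    // Note on the ADRP/LDR pair above (a sketch of the expected link-time
    // resolution, stated as an assumption rather than taken from this file):
    // the ADRP is patched to compute the 4KiB page of the dex cache array
    // element and the LDR immediate is patched to the element's offset within
    // that page, so the resolved ArtMethod* is loaded without a literal pool
    // entry.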
Vladimir Marko58155012015-08-19 12:49:41 +00003637 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003638 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003639 Register reg = XRegisterFrom(temp);
3640 Register method_reg;
3641 if (current_method.IsRegister()) {
3642 method_reg = XRegisterFrom(current_method);
3643 } else {
3644 DCHECK(invoke->GetLocations()->Intrinsified());
3645 DCHECK(!current_method.IsValid());
3646 method_reg = reg;
3647 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3648 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003649
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003650 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003651 __ Ldr(reg.X(),
3652 MemOperand(method_reg.X(),
3653 ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003654 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003655 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3656 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003657 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3658 break;
3659 }
3660 }
3661
3662 switch (invoke->GetCodePtrLocation()) {
3663 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3664 __ Bl(&frame_entry_label_);
3665 break;
3666 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3667 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
3668 vixl::Label* label = &relative_call_patches_.back().label;
Alexandre Rames6dc01742015-11-12 14:44:19 +00003669 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
3670 __ Bind(label);
 3671 __ bl(0); // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003672 break;
3673 }
3674 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3675 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3676 // LR prepared above for better instruction scheduling.
3677 DCHECK(direct_code_loaded);
3678 // lr()
3679 __ Blr(lr);
3680 break;
3681 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3682 // LR = callee_method->entry_point_from_quick_compiled_code_;
3683 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003684 XRegisterFrom(callee_method),
Vladimir Marko58155012015-08-19 12:49:41 +00003685 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
3686 // lr()
3687 __ Blr(lr);
3688 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003689 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003690
Andreas Gampe878d58c2015-01-15 23:24:00 -08003691 DCHECK(!IsLeafMethod());
3692}
3693
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003694void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003695 // Use the calling convention instead of the location of the receiver, as
3696 // intrinsics may have put the receiver in a different register. In the intrinsics
3697 // slow path, the arguments have been moved to the right place, so here we are
 3698 // guaranteed that the receiver is in the first register of the calling convention.
3699 InvokeDexCallingConvention calling_convention;
3700 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003701 Register temp = XRegisterFrom(temp_in);
3702 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3703 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
3704 Offset class_offset = mirror::Object::ClassOffset();
3705 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
3706
3707 BlockPoolsScope block_pools(GetVIXLAssembler());
3708
3709 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003710 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003711 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003712 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003713 // Instead of simply (possibly) unpoisoning `temp` here, we should
3714 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003715 // However this is not required in practice, as this is an
 3716 // intermediate/temporary reference and because the current
 3717 // concurrent copying collector keeps the from-space memory
 3718 // intact/accessible until the end of the marking phase (the
 3719 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003719 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3720 // temp = temp->GetMethodAt(method_offset);
3721 __ Ldr(temp, MemOperand(temp, method_offset));
3722 // lr = temp->GetEntryPoint();
3723 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3724 // lr();
3725 __ Blr(lr);
3726}
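// Recap of the virtual dispatch emitted above (illustrative sketch only):
//   temp = receiver->klass_                      // receiver comes from the
//                                                // calling convention register
//   temp = temp->embedded_vtable_[vtable_index]  // ArtMethod*
//   lr   = temp->entry_point_from_quick_compiled_code_
//   blr lr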
3727
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003728vixl::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(const DexFile& dex_file,
3729 uint32_t string_index,
3730 vixl::Label* adrp_label) {
3731 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3732}
3733
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003734vixl::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(const DexFile& dex_file,
3735 uint32_t type_index,
3736 vixl::Label* adrp_label) {
3737 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3738}
3739
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003740vixl::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
3741 uint32_t element_offset,
3742 vixl::Label* adrp_label) {
3743 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3744}
3745
3746vixl::Label* CodeGeneratorARM64::NewPcRelativePatch(const DexFile& dex_file,
3747 uint32_t offset_or_index,
3748 vixl::Label* adrp_label,
3749 ArenaDeque<PcRelativePatchInfo>* patches) {
3750 // Add a patch entry and return the label.
3751 patches->emplace_back(dex_file, offset_or_index);
3752 PcRelativePatchInfo* info = &patches->back();
3753 vixl::Label* label = &info->label;
3754 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3755 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3756 return label;
3757}
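// Typical use of the patch helpers above, mirroring the call sites later in
// this file (usage sketch only): the first call creates the ADRP patch, and
// the returned label is passed back so the companion ADD/LDR patch is anchored
// to the same PC-relative base.
//   vixl::Label* adrp_label = NewPcRelativeStringPatch(dex_file, string_index);
//   vixl::Label* add_label =
//       NewPcRelativeStringPatch(dex_file, string_index, adrp_label);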
3758
3759vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
3760 const DexFile& dex_file, uint32_t string_index) {
3761 return boot_image_string_patches_.GetOrCreate(
3762 StringReference(&dex_file, string_index),
3763 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3764}
3765
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003766vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
3767 const DexFile& dex_file, uint32_t type_index) {
3768 return boot_image_type_patches_.GetOrCreate(
3769 TypeReference(&dex_file, type_index),
3770 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3771}
3772
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003773vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(uint64_t address) {
3774 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3775 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3776 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3777}
3778
3779vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(uint64_t address) {
3780 return DeduplicateUint64Literal(address);
3781}
3782
Vladimir Marko58155012015-08-19 12:49:41 +00003783void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3784 DCHECK(linker_patches->empty());
3785 size_t size =
3786 method_patches_.size() +
3787 call_patches_.size() +
3788 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003789 pc_relative_dex_cache_patches_.size() +
3790 boot_image_string_patches_.size() +
3791 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003792 boot_image_type_patches_.size() +
3793 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003794 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003795 linker_patches->reserve(size);
3796 for (const auto& entry : method_patches_) {
3797 const MethodReference& target_method = entry.first;
3798 vixl::Literal<uint64_t>* literal = entry.second;
3799 linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
3800 target_method.dex_file,
3801 target_method.dex_method_index));
3802 }
3803 for (const auto& entry : call_patches_) {
3804 const MethodReference& target_method = entry.first;
3805 vixl::Literal<uint64_t>* literal = entry.second;
3806 linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
3807 target_method.dex_file,
3808 target_method.dex_method_index));
3809 }
3810 for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003811 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003812 info.target_method.dex_file,
3813 info.target_method.dex_method_index));
3814 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003815 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003816 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003817 &info.target_dex_file,
Alexandre Rames6dc01742015-11-12 14:44:19 +00003818 info.pc_insn_label->location(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003819 info.offset_or_index));
3820 }
3821 for (const auto& entry : boot_image_string_patches_) {
3822 const StringReference& target_string = entry.first;
3823 vixl::Literal<uint32_t>* literal = entry.second;
3824 linker_patches->push_back(LinkerPatch::StringPatch(literal->offset(),
3825 target_string.dex_file,
3826 target_string.string_index));
3827 }
3828 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
3829 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.location(),
3830 &info.target_dex_file,
3831 info.pc_insn_label->location(),
3832 info.offset_or_index));
3833 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003834 for (const auto& entry : boot_image_type_patches_) {
3835 const TypeReference& target_type = entry.first;
3836 vixl::Literal<uint32_t>* literal = entry.second;
3837 linker_patches->push_back(LinkerPatch::TypePatch(literal->offset(),
3838 target_type.dex_file,
3839 target_type.type_index));
3840 }
3841 for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
3842 linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.location(),
3843 &info.target_dex_file,
3844 info.pc_insn_label->location(),
3845 info.offset_or_index));
3846 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003847 for (const auto& entry : boot_image_address_patches_) {
3848 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
3849 vixl::Literal<uint32_t>* literal = entry.second;
3850 linker_patches->push_back(LinkerPatch::RecordPosition(literal->offset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003851 }
3852}
3853
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003854vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
3855 Uint32ToLiteralMap* map) {
3856 return map->GetOrCreate(
3857 value,
3858 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3859}
3860
Vladimir Marko58155012015-08-19 12:49:41 +00003861vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003862 return uint64_literals_.GetOrCreate(
3863 value,
3864 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003865}
3866
3867vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
3868 MethodReference target_method,
3869 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003870 return map->GetOrCreate(
3871 target_method,
3872 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003873}
3874
3875vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
3876 MethodReference target_method) {
3877 return DeduplicateMethodLiteral(target_method, &method_patches_);
3878}
3879
3880vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
3881 MethodReference target_method) {
3882 return DeduplicateMethodLiteral(target_method, &call_patches_);
3883}
3884
3885
Andreas Gampe878d58c2015-01-15 23:24:00 -08003886void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003887 // Explicit clinit checks triggered by static invokes must have been pruned by
3888 // art::PrepareForRegisterAllocation.
3889 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003890
Andreas Gampe878d58c2015-01-15 23:24:00 -08003891 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3892 return;
3893 }
3894
Alexandre Ramesd921d642015-04-16 15:07:16 +01003895 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003896 LocationSummary* locations = invoke->GetLocations();
3897 codegen_->GenerateStaticOrDirectCall(
3898 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003899 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003900}
3901
3902void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003903 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3904 return;
3905 }
3906
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003907 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003908 DCHECK(!codegen_->IsLeafMethod());
3909 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3910}
3911
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003912HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
3913 HLoadClass::LoadKind desired_class_load_kind) {
3914 if (kEmitCompilerReadBarrier) {
3915 switch (desired_class_load_kind) {
3916 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3917 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3918 case HLoadClass::LoadKind::kBootImageAddress:
3919 // TODO: Implement for read barrier.
3920 return HLoadClass::LoadKind::kDexCacheViaMethod;
3921 default:
3922 break;
3923 }
3924 }
3925 switch (desired_class_load_kind) {
3926 case HLoadClass::LoadKind::kReferrersClass:
3927 break;
3928 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3929 DCHECK(!GetCompilerOptions().GetCompilePic());
3930 break;
3931 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3932 DCHECK(GetCompilerOptions().GetCompilePic());
3933 break;
3934 case HLoadClass::LoadKind::kBootImageAddress:
3935 break;
3936 case HLoadClass::LoadKind::kDexCacheAddress:
3937 DCHECK(Runtime::Current()->UseJitCompilation());
3938 break;
3939 case HLoadClass::LoadKind::kDexCachePcRelative:
3940 DCHECK(!Runtime::Current()->UseJitCompilation());
3941 break;
3942 case HLoadClass::LoadKind::kDexCacheViaMethod:
3943 break;
3944 }
3945 return desired_class_load_kind;
3946}
3947
Alexandre Rames67555f72014-11-18 10:55:16 +00003948void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003949 if (cls->NeedsAccessCheck()) {
3950 InvokeRuntimeCallingConvention calling_convention;
3951 CodeGenerator::CreateLoadClassLocationSummary(
3952 cls,
3953 LocationFrom(calling_convention.GetRegisterAt(0)),
3954 LocationFrom(vixl::x0),
3955 /* code_generator_supports_read_barrier */ true);
3956 return;
3957 }
3958
3959 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
3960 ? LocationSummary::kCallOnSlowPath
3961 : LocationSummary::kNoCall;
3962 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3963 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3964 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3965 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3966 locations->SetInAt(0, Location::RequiresRegister());
3967 }
3968 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00003969}
3970
3971void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003972 if (cls->NeedsAccessCheck()) {
3973 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
3974 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
3975 cls,
3976 cls->GetDexPc(),
3977 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00003978 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01003979 return;
3980 }
3981
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003982 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01003983 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00003984
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003985 bool generate_null_check = false;
3986 switch (cls->GetLoadKind()) {
3987 case HLoadClass::LoadKind::kReferrersClass: {
3988 DCHECK(!cls->CanCallRuntime());
3989 DCHECK(!cls->MustGenerateClinitCheck());
3990 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3991 Register current_method = InputRegisterAt(cls, 0);
3992 GenerateGcRootFieldLoad(
3993 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
3994 break;
3995 }
3996 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3997 DCHECK(!kEmitCompilerReadBarrier);
3998 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
3999 cls->GetTypeIndex()));
4000 break;
4001 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
4002 DCHECK(!kEmitCompilerReadBarrier);
4003 // Add ADRP with its PC-relative type patch.
4004 const DexFile& dex_file = cls->GetDexFile();
4005 uint32_t type_index = cls->GetTypeIndex();
4006 vixl::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
4007 {
4008 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4009 __ Bind(adrp_label);
4010 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004011 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004012 // Add ADD with its PC-relative type patch.
4013 vixl::Label* add_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
4014 {
4015 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4016 __ Bind(add_label);
4017 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004018 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004019 break;
4020 }
4021 case HLoadClass::LoadKind::kBootImageAddress: {
4022 DCHECK(!kEmitCompilerReadBarrier);
4023 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
4024 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4025 break;
4026 }
4027 case HLoadClass::LoadKind::kDexCacheAddress: {
4028 DCHECK_NE(cls->GetAddress(), 0u);
4029 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4030 // that gives a 16KiB range. To try and reduce the number of literals if we load
4031 // multiple types, simply split the dex cache address to a 16KiB aligned base
4032 // loaded from a literal and the remaining offset embedded in the load.
4033 static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
4034 DCHECK_ALIGNED(cls->GetAddress(), 4u);
4035 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4036 uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4037 uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
4038 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4039 // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
4040 GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset);
4041 generate_null_check = !cls->IsInDexCache();
4042 break;
4043 }
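    // Worked example of the base/offset split described above (illustrative
    // address, not from the original source): with offset_bits == 14, the low
    // 14 bits of the address become the offset folded into the load, e.g.
    //   cls->GetAddress() == 0x70123458
    //   base_address      == 0x70120000  // 16KiB-aligned, shared 64-bit literal
    //   offset            == 0x3458      // embedded in the 32-bit LDR
    // so classes whose GC roots lie in the same 16KiB window reuse one literal.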
4044 case HLoadClass::LoadKind::kDexCachePcRelative: {
4045 // Add ADRP with its PC-relative DexCache access patch.
4046 const DexFile& dex_file = cls->GetDexFile();
4047 uint32_t element_offset = cls->GetDexCacheElementOffset();
4048 vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
4049 {
4050 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4051 __ Bind(adrp_label);
4052 __ adrp(out.X(), /* offset placeholder */ 0);
4053 }
4054 // Add LDR with its PC-relative DexCache access patch.
4055 vixl::Label* ldr_label =
4056 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4057 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4058 GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4059 generate_null_check = !cls->IsInDexCache();
4060 break;
4061 }
4062 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4063 MemberOffset resolved_types_offset =
4064 ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
4065 // /* GcRoot<mirror::Class>[] */ out =
4066 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4067 Register current_method = InputRegisterAt(cls, 0);
4068 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
4069 // /* GcRoot<mirror::Class> */ out = out[type_index]
4070 GenerateGcRootFieldLoad(
4071 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
4072 generate_null_check = !cls->IsInDexCache();
4073 break;
4074 }
4075 }
4076
4077 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4078 DCHECK(cls->CanCallRuntime());
4079 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4080 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4081 codegen_->AddSlowPath(slow_path);
4082 if (generate_null_check) {
4083 __ Cbz(out, slow_path->GetEntryLabel());
4084 }
4085 if (cls->MustGenerateClinitCheck()) {
4086 GenerateClassInitializationCheck(slow_path, out);
4087 } else {
4088 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004089 }
4090 }
4091}
4092
David Brazdilcb1c0552015-08-04 16:22:25 +01004093static MemOperand GetExceptionTlsAddress() {
4094 return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
4095}
4096
Alexandre Rames67555f72014-11-18 10:55:16 +00004097void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4098 LocationSummary* locations =
4099 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4100 locations->SetOut(Location::RequiresRegister());
4101}
4102
4103void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004104 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4105}
4106
4107void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4108 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4109}
4110
4111void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4112 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004113}
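// Both visitors above address the pending-exception slot relative to the
// thread register via GetExceptionTlsAddress(); roughly (illustrative
// encoding), the load is `ldr wOut, [tr, #exception_offset]` and the clear
// is `str wzr, [tr, #exception_offset]`.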
4114
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004115HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4116 HLoadString::LoadKind desired_string_load_kind) {
4117 if (kEmitCompilerReadBarrier) {
4118 switch (desired_string_load_kind) {
4119 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4120 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4121 case HLoadString::LoadKind::kBootImageAddress:
4122 // TODO: Implement for read barrier.
4123 return HLoadString::LoadKind::kDexCacheViaMethod;
4124 default:
4125 break;
4126 }
4127 }
4128 switch (desired_string_load_kind) {
4129 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4130 DCHECK(!GetCompilerOptions().GetCompilePic());
4131 break;
4132 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4133 DCHECK(GetCompilerOptions().GetCompilePic());
4134 break;
4135 case HLoadString::LoadKind::kBootImageAddress:
4136 break;
4137 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004138 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004139 break;
4140 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004141 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004142 break;
4143 case HLoadString::LoadKind::kDexCacheViaMethod:
4144 break;
4145 }
4146 return desired_string_load_kind;
4147}
4148
Alexandre Rames67555f72014-11-18 10:55:16 +00004149void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004150 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004151 ? LocationSummary::kCallOnSlowPath
4152 : LocationSummary::kNoCall;
4153 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004154 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4155 locations->SetInAt(0, Location::RequiresRegister());
4156 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004157 locations->SetOut(Location::RequiresRegister());
4158}
4159
4160void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004161 Location out_loc = load->GetLocations()->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00004162 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004163
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004164 switch (load->GetLoadKind()) {
4165 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4166 DCHECK(!kEmitCompilerReadBarrier);
4167 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4168 load->GetStringIndex()));
4169 return; // No dex cache slow path.
4170 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4171 DCHECK(!kEmitCompilerReadBarrier);
4172 // Add ADRP with its PC-relative String patch.
4173 const DexFile& dex_file = load->GetDexFile();
4174 uint32_t string_index = load->GetStringIndex();
4175 vixl::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
4176 {
4177 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4178 __ Bind(adrp_label);
4179 __ adrp(out.X(), /* offset placeholder */ 0);
4180 }
4181 // Add ADD with its PC-relative String patch.
4182 vixl::Label* add_label =
4183 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4184 {
4185 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4186 __ Bind(add_label);
4187 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4188 }
4189 return; // No dex cache slow path.
4190 }
4191 case HLoadString::LoadKind::kBootImageAddress: {
4192 DCHECK(!kEmitCompilerReadBarrier);
4193 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4194 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4195 return; // No dex cache slow path.
4196 }
4197 case HLoadString::LoadKind::kDexCacheAddress: {
4198 DCHECK_NE(load->GetAddress(), 0u);
4199 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4200 // that gives a 16KiB range. To try and reduce the number of literals if we load
4201 // multiple strings, simply split the dex cache address to a 16KiB aligned base
4202 // loaded from a literal and the remaining offset embedded in the load.
4203 static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
4204 DCHECK_ALIGNED(load->GetAddress(), 4u);
4205 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4206 uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4207 uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
4208 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004209 // /* GcRoot<mirror::String> */ out = *(base_address + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004210 GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
4211 break;
4212 }
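    // The same 16KiB base/offset split as in VisitLoadClass applies here: the
    // 16KiB-aligned part of the dex cache address comes from a deduplicated
    // 64-bit literal and the low 14 bits are folded into the GC root load
    // (see the worked example in VisitLoadClass; illustrative note only).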
4213 case HLoadString::LoadKind::kDexCachePcRelative: {
4214 // Add ADRP with its PC-relative DexCache access patch.
4215 const DexFile& dex_file = load->GetDexFile();
4216 uint32_t element_offset = load->GetDexCacheElementOffset();
4217 vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
4218 {
4219 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4220 __ Bind(adrp_label);
4221 __ adrp(out.X(), /* offset placeholder */ 0);
4222 }
4223 // Add LDR with its PC-relative DexCache access patch.
4224 vixl::Label* ldr_label =
4225 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004226 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004227 GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4228 break;
4229 }
4230 case HLoadString::LoadKind::kDexCacheViaMethod: {
4231 Register current_method = InputRegisterAt(load, 0);
4232 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4233 GenerateGcRootFieldLoad(
4234 load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4235 // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
4236 __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
4237 // /* GcRoot<mirror::String> */ out = out[string_index]
4238 GenerateGcRootFieldLoad(
4239 load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
4240 break;
4241 }
4242 default:
4243 LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
4244 UNREACHABLE();
4245 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004246
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004247 if (!load->IsInDexCache()) {
4248 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
4249 codegen_->AddSlowPath(slow_path);
4250 __ Cbz(out, slow_path->GetEntryLabel());
4251 __ Bind(slow_path->GetExitLabel());
4252 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004253}
4254
Alexandre Rames5319def2014-10-23 10:03:10 +01004255void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4256 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4257 locations->SetOut(Location::ConstantLocation(constant));
4258}
4259
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004260void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004261 // Will be generated at use site.
4262}
4263
Alexandre Rames67555f72014-11-18 10:55:16 +00004264void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4265 LocationSummary* locations =
4266 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4267 InvokeRuntimeCallingConvention calling_convention;
4268 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4269}
4270
4271void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4272 codegen_->InvokeRuntime(instruction->IsEnter()
4273 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
4274 instruction,
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004275 instruction->GetDexPc(),
4276 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004277 if (instruction->IsEnter()) {
4278 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4279 } else {
4280 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4281 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004282}
4283
Alexandre Rames42d641b2014-10-27 14:00:51 +00004284void LocationsBuilderARM64::VisitMul(HMul* mul) {
4285 LocationSummary* locations =
4286 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4287 switch (mul->GetResultType()) {
4288 case Primitive::kPrimInt:
4289 case Primitive::kPrimLong:
4290 locations->SetInAt(0, Location::RequiresRegister());
4291 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004292 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004293 break;
4294
4295 case Primitive::kPrimFloat:
4296 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004297 locations->SetInAt(0, Location::RequiresFpuRegister());
4298 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004299 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004300 break;
4301
4302 default:
4303 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4304 }
4305}
4306
4307void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4308 switch (mul->GetResultType()) {
4309 case Primitive::kPrimInt:
4310 case Primitive::kPrimLong:
4311 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4312 break;
4313
4314 case Primitive::kPrimFloat:
4315 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004316 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004317 break;
4318
4319 default:
4320 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4321 }
4322}
4323
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004324void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4325 LocationSummary* locations =
4326 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4327 switch (neg->GetResultType()) {
4328 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004329 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004330 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004331 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004332 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004333
4334 case Primitive::kPrimFloat:
4335 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004336 locations->SetInAt(0, Location::RequiresFpuRegister());
4337 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004338 break;
4339
4340 default:
4341 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4342 }
4343}
4344
4345void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4346 switch (neg->GetResultType()) {
4347 case Primitive::kPrimInt:
4348 case Primitive::kPrimLong:
4349 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4350 break;
4351
4352 case Primitive::kPrimFloat:
4353 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004354 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004355 break;
4356
4357 default:
4358 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4359 }
4360}
4361
4362void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4363 LocationSummary* locations =
4364 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4365 InvokeRuntimeCallingConvention calling_convention;
4366 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004367 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004368 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004369 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004370}
4371
4372void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
4373 LocationSummary* locations = instruction->GetLocations();
4374 InvokeRuntimeCallingConvention calling_convention;
4375 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
4376 DCHECK(type_index.Is(w0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004377 __ Mov(type_index, instruction->GetTypeIndex());
Roland Levillain4d027112015-07-01 15:41:14 +01004378  // Note: if heap poisoning is enabled, the entry point takes care
4379 // of poisoning the reference.
Calin Juravle175dc732015-08-25 15:42:32 +01004380 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4381 instruction,
4382 instruction->GetDexPc(),
4383 nullptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004384 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004385}
4386
Alexandre Rames5319def2014-10-23 10:03:10 +01004387void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4388 LocationSummary* locations =
4389 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4390 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004391 if (instruction->IsStringAlloc()) {
4392 locations->AddTemp(LocationFrom(kArtMethodRegister));
4393 } else {
4394 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4395 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
4396 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004397 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4398}
4399
4400void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004401  // Note: if heap poisoning is enabled, the entry point takes care
4402 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004403 if (instruction->IsStringAlloc()) {
4404 // String is allocated through StringFactory. Call NewEmptyString entry point.
4405 Location temp = instruction->GetLocations()->GetTemp(0);
4406 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
4407 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4408 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
4409 __ Blr(lr);
4410 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4411 } else {
4412 codegen_->InvokeRuntime(instruction->GetEntrypoint(),
4413 instruction,
4414 instruction->GetDexPc(),
4415 nullptr);
4416 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4417 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004418}
4419
4420void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4421 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004422 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004423 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004424}
4425
4426void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004427 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004428 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004429 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004430 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004431 break;
4432
4433 default:
4434 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4435 }
4436}
4437
David Brazdil66d126e2015-04-03 16:02:44 +01004438void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4439 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4440 locations->SetInAt(0, Location::RequiresRegister());
4441 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4442}
4443
4444void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
David Brazdil66d126e2015-04-03 16:02:44 +01004445 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
4446}
4447
Alexandre Rames5319def2014-10-23 10:03:10 +01004448void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00004449 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
4450 ? LocationSummary::kCallOnSlowPath
4451 : LocationSummary::kNoCall;
4452 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames5319def2014-10-23 10:03:10 +01004453 locations->SetInAt(0, Location::RequiresRegister());
4454 if (instruction->HasUses()) {
4455 locations->SetOut(Location::SameAsFirstInput());
4456 }
4457}
4458
Calin Juravle2ae48182016-03-16 14:05:09 +00004459void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4460 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004461 return;
4462 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004463
Alexandre Ramesd921d642015-04-16 15:07:16 +01004464 BlockPoolsScope block_pools(GetVIXLAssembler());
4465 Location obj = instruction->GetLocations()->InAt(0);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004466 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
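  // The load above targets wzr, so the value is discarded; its only purpose is to
  // fault if `obj` is null. The PC recorded below lets the fault handler map the
  // resulting SIGSEGV back to this instruction and deliver the NullPointerException.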
Calin Juravle2ae48182016-03-16 14:05:09 +00004467 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004468}
4469
Calin Juravle2ae48182016-03-16 14:05:09 +00004470void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004471 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004472 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004473
4474 LocationSummary* locations = instruction->GetLocations();
4475 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004476
4477 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01004478}
4479
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004480void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00004481 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004482}
4483
Alexandre Rames67555f72014-11-18 10:55:16 +00004484void LocationsBuilderARM64::VisitOr(HOr* instruction) {
4485 HandleBinaryOp(instruction);
4486}
4487
4488void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
4489 HandleBinaryOp(instruction);
4490}
4491
Alexandre Rames3e69f162014-12-10 10:36:50 +00004492void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
4493 LOG(FATAL) << "Unreachable";
4494}
4495
4496void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
4497 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
4498}
4499
Alexandre Rames5319def2014-10-23 10:03:10 +01004500void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
4501 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4502 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4503 if (location.IsStackSlot()) {
4504 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4505 } else if (location.IsDoubleStackSlot()) {
4506 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4507 }
4508 locations->SetOut(location);
4509}
4510
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004511void InstructionCodeGeneratorARM64::VisitParameterValue(
4512 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004513 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004514}
4515
4516void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
4517 LocationSummary* locations =
4518 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004519 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004520}
4521
4522void InstructionCodeGeneratorARM64::VisitCurrentMethod(
4523 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
4524 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01004525}
4526
4527void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
4528 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004529 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004530 locations->SetInAt(i, Location::Any());
4531 }
4532 locations->SetOut(Location::Any());
4533}
4534
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004535void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004536 LOG(FATAL) << "Unreachable";
4537}
4538
Serban Constantinescu02164b32014-11-13 14:05:07 +00004539void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004540 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00004541 LocationSummary::CallKind call_kind =
4542 Primitive::IsFloatingPointType(type) ? LocationSummary::kCall : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004543 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
4544
4545 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004546 case Primitive::kPrimInt:
4547 case Primitive::kPrimLong:
4548 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08004549 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00004550 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4551 break;
4552
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004553 case Primitive::kPrimFloat:
4554 case Primitive::kPrimDouble: {
4555 InvokeRuntimeCallingConvention calling_convention;
4556 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
4557 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
4558 locations->SetOut(calling_convention.GetReturnLocation(type));
4559
4560 break;
4561 }
4562
Serban Constantinescu02164b32014-11-13 14:05:07 +00004563 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004564 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00004565 }
4566}
4567
4568void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
4569 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004570
Serban Constantinescu02164b32014-11-13 14:05:07 +00004571 switch (type) {
4572 case Primitive::kPrimInt:
4573 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08004574 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004575 break;
4576 }
4577
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004578 case Primitive::kPrimFloat:
4579 case Primitive::kPrimDouble: {
4580 int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
4581 : QUICK_ENTRY_POINT(pFmod);
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004582 codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
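      // AArch64 has no floating-point remainder instruction, so the remainder is
      // computed in the runtime: effectively out = fmodf(in0, in1) for floats and
      // out = fmod(in0, in1) for doubles.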
Roland Levillain888d0672015-11-23 18:53:50 +00004583 if (type == Primitive::kPrimFloat) {
4584 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
4585 } else {
4586 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4587 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00004588 break;
4589 }
4590
Serban Constantinescu02164b32014-11-13 14:05:07 +00004591 default:
4592 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00004593 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00004594 }
4595}
4596
Calin Juravle27df7582015-04-17 19:12:31 +01004597void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4598 memory_barrier->SetLocations(nullptr);
4599}
4600
4601void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00004602 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01004603}
4604
Alexandre Rames5319def2014-10-23 10:03:10 +01004605void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
4606 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4607 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004608 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01004609}
4610
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004611void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004612 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004613}
4614
4615void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
4616 instruction->SetLocations(nullptr);
4617}
4618
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004619void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004620 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01004621}
4622
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004623void LocationsBuilderARM64::VisitRor(HRor* ror) {
4624 HandleBinaryOp(ror);
4625}
4626
4627void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
4628 HandleBinaryOp(ror);
4629}
4630
Serban Constantinescu02164b32014-11-13 14:05:07 +00004631void LocationsBuilderARM64::VisitShl(HShl* shl) {
4632 HandleShift(shl);
4633}
4634
4635void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
4636 HandleShift(shl);
4637}
4638
4639void LocationsBuilderARM64::VisitShr(HShr* shr) {
4640 HandleShift(shr);
4641}
4642
4643void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
4644 HandleShift(shr);
4645}
4646
Alexandre Rames5319def2014-10-23 10:03:10 +01004647void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004648 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004649}
4650
4651void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004652 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004653}
4654
Alexandre Rames67555f72014-11-18 10:55:16 +00004655void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004656 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00004657}
4658
4659void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004660 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00004661}
4662
4663void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01004664 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01004665}
4666
Alexandre Rames67555f72014-11-18 10:55:16 +00004667void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004668 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01004669}
4670
Calin Juravlee460d1d2015-09-29 04:52:17 +01004671void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
4672 HUnresolvedInstanceFieldGet* instruction) {
4673 FieldAccessCallingConventionARM64 calling_convention;
4674 codegen_->CreateUnresolvedFieldLocationSummary(
4675 instruction, instruction->GetFieldType(), calling_convention);
4676}
4677
4678void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
4679 HUnresolvedInstanceFieldGet* instruction) {
4680 FieldAccessCallingConventionARM64 calling_convention;
4681 codegen_->GenerateUnresolvedFieldAccess(instruction,
4682 instruction->GetFieldType(),
4683 instruction->GetFieldIndex(),
4684 instruction->GetDexPc(),
4685 calling_convention);
4686}
4687
4688void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
4689 HUnresolvedInstanceFieldSet* instruction) {
4690 FieldAccessCallingConventionARM64 calling_convention;
4691 codegen_->CreateUnresolvedFieldLocationSummary(
4692 instruction, instruction->GetFieldType(), calling_convention);
4693}
4694
4695void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
4696 HUnresolvedInstanceFieldSet* instruction) {
4697 FieldAccessCallingConventionARM64 calling_convention;
4698 codegen_->GenerateUnresolvedFieldAccess(instruction,
4699 instruction->GetFieldType(),
4700 instruction->GetFieldIndex(),
4701 instruction->GetDexPc(),
4702 calling_convention);
4703}
4704
4705void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
4706 HUnresolvedStaticFieldGet* instruction) {
4707 FieldAccessCallingConventionARM64 calling_convention;
4708 codegen_->CreateUnresolvedFieldLocationSummary(
4709 instruction, instruction->GetFieldType(), calling_convention);
4710}
4711
4712void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
4713 HUnresolvedStaticFieldGet* instruction) {
4714 FieldAccessCallingConventionARM64 calling_convention;
4715 codegen_->GenerateUnresolvedFieldAccess(instruction,
4716 instruction->GetFieldType(),
4717 instruction->GetFieldIndex(),
4718 instruction->GetDexPc(),
4719 calling_convention);
4720}
4721
4722void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
4723 HUnresolvedStaticFieldSet* instruction) {
4724 FieldAccessCallingConventionARM64 calling_convention;
4725 codegen_->CreateUnresolvedFieldLocationSummary(
4726 instruction, instruction->GetFieldType(), calling_convention);
4727}
4728
4729void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
4730 HUnresolvedStaticFieldSet* instruction) {
4731 FieldAccessCallingConventionARM64 calling_convention;
4732 codegen_->GenerateUnresolvedFieldAccess(instruction,
4733 instruction->GetFieldType(),
4734 instruction->GetFieldIndex(),
4735 instruction->GetDexPc(),
4736 calling_convention);
4737}
4738
Alexandre Rames5319def2014-10-23 10:03:10 +01004739void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
4740 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4741}
4742
4743void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004744 HBasicBlock* block = instruction->GetBlock();
4745 if (block->GetLoopInformation() != nullptr) {
4746 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4747 // The back edge will generate the suspend check.
4748 return;
4749 }
4750 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4751 // The goto will generate the suspend check.
4752 return;
4753 }
4754 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01004755}
4756
Alexandre Rames67555f72014-11-18 10:55:16 +00004757void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
4758 LocationSummary* locations =
4759 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
4760 InvokeRuntimeCallingConvention calling_convention;
4761 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4762}
4763
4764void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
4765 codegen_->InvokeRuntime(
Nicolas Geoffrayeeefa122015-03-13 18:52:59 +00004766 QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004767 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004768}
4769
4770void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
4771 LocationSummary* locations =
4772 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
4773 Primitive::Type input_type = conversion->GetInputType();
4774 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00004775 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00004776 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4777 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4778 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4779 }
4780
Alexandre Rames542361f2015-01-29 16:57:31 +00004781 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004782 locations->SetInAt(0, Location::RequiresFpuRegister());
4783 } else {
4784 locations->SetInAt(0, Location::RequiresRegister());
4785 }
4786
Alexandre Rames542361f2015-01-29 16:57:31 +00004787 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004788 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4789 } else {
4790 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4791 }
4792}
4793
4794void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
4795 Primitive::Type result_type = conversion->GetResultType();
4796 Primitive::Type input_type = conversion->GetInputType();
4797
4798 DCHECK_NE(input_type, result_type);
4799
Alexandre Rames542361f2015-01-29 16:57:31 +00004800 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00004801 int result_size = Primitive::ComponentSize(result_type);
4802 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00004803 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00004804 Register output = OutputRegister(conversion);
4805 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00004806 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01004807 // 'int' values are used directly as W registers, discarding the top
4808 // bits, so we don't need to sign-extend and can just perform a move.
4809 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
4810 // top 32 bits of the target register. We theoretically could leave those
4811 // bits unchanged, but we would have to make sure that no code uses a
 4812      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
4813 // zero.
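      // Concrete examples of the integral conversions handled here and below
      // (illustrative, assuming the input lands in w1/x1 and the output in w0/x0):
      // a long-to-int conversion becomes `mov w0, w1`, an int-to-short conversion
      // becomes `sbfx w0, w1, #0, #16`, and an int-to-char conversion becomes
      // `ubfx w0, w1, #0, #16`.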
4814 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00004815 } else if (result_type == Primitive::kPrimChar ||
4816 (input_type == Primitive::kPrimChar && input_size < result_size)) {
4817 __ Ubfx(output,
4818 output.IsX() ? source.X() : source.W(),
4819 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004820 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00004821 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00004822 }
Alexandre Rames542361f2015-01-29 16:57:31 +00004823 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004824 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004825 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004826 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4827 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00004828 } else if (Primitive::IsFloatingPointType(result_type) &&
4829 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00004830 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
4831 } else {
4832 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4833 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00004834 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00004835}
Alexandre Rames67555f72014-11-18 10:55:16 +00004836
Serban Constantinescu02164b32014-11-13 14:05:07 +00004837void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
4838 HandleShift(ushr);
4839}
4840
4841void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
4842 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00004843}
4844
4845void LocationsBuilderARM64::VisitXor(HXor* instruction) {
4846 HandleBinaryOp(instruction);
4847}
4848
4849void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
4850 HandleBinaryOp(instruction);
4851}
4852
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004853void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004854 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004855 LOG(FATAL) << "Unreachable";
4856}
4857
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004858void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00004859 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00004860 LOG(FATAL) << "Unreachable";
4861}
4862
Mark Mendellfe57faa2015-09-18 09:26:15 -04004863// Simple implementation of packed switch - generate cascaded compare/jumps.
4864void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4865 LocationSummary* locations =
4866 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4867 locations->SetInAt(0, Location::RequiresRegister());
4868}
4869
4870void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4871 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08004872 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04004873 Register value_reg = InputRegisterAt(switch_instr, 0);
4874 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4875
Zheng Xu3927c8b2015-11-18 17:46:25 +08004876  // Use 16 as a rough upper bound on the average number of assembly instructions generated per HIR in a graph.
4877 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
 4878  // ADR has a limited range (+/- 1MB), so we set a threshold on the number of HIRs in the graph to
4879 // make sure we don't emit it if the target may run out of range.
4880 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
4881 // ranges and emit the tables only as required.
 4882  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
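  // For reference: with vixl::kInstructionSize == 4 this is 16 * 4 = 64 bytes per HIR,
  // so the threshold evaluates to 1 MB / 64 = 16384 HIRs, a conservative bound that keeps
  // the Adr used for the jump table within its +/- 1MB reach.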
Mark Mendellfe57faa2015-09-18 09:26:15 -04004883
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004884 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08004885 // Current instruction id is an upper bound of the number of HIRs in the graph.
4886 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
4887 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004888 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4889 Register temp = temps.AcquireW();
4890 __ Subs(temp, value_reg, Operand(lower_bound));
4891
Zheng Xu3927c8b2015-11-18 17:46:25 +08004892 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00004893 // Jump to successors[0] if value == lower_bound.
4894 __ B(eq, codegen_->GetLabelOf(successors[0]));
4895 int32_t last_index = 0;
4896 for (; num_entries - last_index > 2; last_index += 2) {
4897 __ Subs(temp, temp, Operand(2));
4898 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4899 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
4900 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4901 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
4902 }
4903 if (num_entries - last_index == 2) {
 4904      // Handle the last remaining case_value.
4905 __ Cmp(temp, Operand(1));
4906 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08004907 }
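    // Sketch of the cascade emitted above for, say, lower_bound == 10 and
    // num_entries == 4 (register and label names are placeholders):
    //   subs w16, w_value, #10
    //   b.eq case_10
    //   subs w16, w16, #2
    //   b.lo case_11
    //   b.eq case_12
    //   cmp  w16, #1
    //   b.eq case_13
    // followed by the unconditional branch to the default block emitted below.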
4908
4909 // And the default for any other value.
4910 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
4911 __ B(codegen_->GetLabelOf(default_block));
4912 }
4913 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01004914 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08004915
4916 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4917
 4918    // The instructions below should use at most one blocked register. Since there are two blocked
4919 // registers, we are free to block one.
4920 Register temp_w = temps.AcquireW();
4921 Register index;
4922 // Remove the bias.
4923 if (lower_bound != 0) {
4924 index = temp_w;
4925 __ Sub(index, value_reg, Operand(lower_bound));
4926 } else {
4927 index = value_reg;
4928 }
4929
 4930    // Jump to the default block if the index is out of range.
4931 __ Cmp(index, Operand(num_entries));
4932 __ B(hs, codegen_->GetLabelOf(default_block));
4933
 4934    // In the current VIXL implementation, encoding the immediate value for Adr does not
 4935    // require any blocked registers, so we are free to use both VIXL blocked registers to
 4936    // reduce register pressure.
4937 Register table_base = temps.AcquireX();
4938 // Load jump offset from the table.
4939 __ Adr(table_base, jump_table->GetTableStartLabel());
4940 Register jump_offset = temp_w;
4941 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
4942
 4943    // Jump to the target block by branching to table_base (PC-relative) + offset.
4944 Register target_address = table_base;
4945 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
4946 __ Br(target_address);
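    // Sketch of the dispatch sequence emitted above (register names are placeholders):
    //   sub  w16, w_value, #lower_bound   // only when lower_bound != 0
    //   cmp  w16, #num_entries
    //   b.hs default
    //   adr  x17, jump_table
    //   ldr  w16, [x17, w16, uxtw #2]     // 32-bit offsets, one per entry
    //   add  x17, x17, w16, sxtw
    //   br   x17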
Mark Mendellfe57faa2015-09-18 09:26:15 -04004947 }
4948}
4949
Roland Levillain44015862016-01-22 11:47:17 +00004950void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
4951 Location out,
4952 uint32_t offset,
4953 Location maybe_temp) {
4954 Primitive::Type type = Primitive::kPrimNot;
4955 Register out_reg = RegisterFrom(out, type);
4956 if (kEmitCompilerReadBarrier) {
4957 Register temp_reg = RegisterFrom(maybe_temp, type);
4958 if (kUseBakerReadBarrier) {
4959 // Load with fast path based Baker's read barrier.
4960 // /* HeapReference<Object> */ out = *(out + offset)
4961 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4962 out,
4963 out_reg,
4964 offset,
4965 temp_reg,
4966 /* needs_null_check */ false,
4967 /* use_load_acquire */ false);
4968 } else {
4969 // Load with slow path based read barrier.
4970 // Save the value of `out` into `maybe_temp` before overwriting it
4971 // in the following move operation, as we will need it for the
4972 // read barrier below.
4973 __ Mov(temp_reg, out_reg);
4974 // /* HeapReference<Object> */ out = *(out + offset)
4975 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4976 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
4977 }
4978 } else {
4979 // Plain load with no read barrier.
4980 // /* HeapReference<Object> */ out = *(out + offset)
4981 __ Ldr(out_reg, HeapOperand(out_reg, offset));
4982 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
4983 }
4984}
4985
4986void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
4987 Location out,
4988 Location obj,
4989 uint32_t offset,
4990 Location maybe_temp) {
4991 Primitive::Type type = Primitive::kPrimNot;
4992 Register out_reg = RegisterFrom(out, type);
4993 Register obj_reg = RegisterFrom(obj, type);
4994 if (kEmitCompilerReadBarrier) {
4995 if (kUseBakerReadBarrier) {
4996 // Load with fast path based Baker's read barrier.
4997 Register temp_reg = RegisterFrom(maybe_temp, type);
4998 // /* HeapReference<Object> */ out = *(obj + offset)
4999 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5000 out,
5001 obj_reg,
5002 offset,
5003 temp_reg,
5004 /* needs_null_check */ false,
5005 /* use_load_acquire */ false);
5006 } else {
5007 // Load with slow path based read barrier.
5008 // /* HeapReference<Object> */ out = *(obj + offset)
5009 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5010 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5011 }
5012 } else {
5013 // Plain load with no read barrier.
5014 // /* HeapReference<Object> */ out = *(obj + offset)
5015 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5016 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5017 }
5018}
5019
5020void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
5021 Location root,
5022 vixl::Register obj,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005023 uint32_t offset,
5024 vixl::Label* fixup_label) {
Roland Levillain44015862016-01-22 11:47:17 +00005025 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
5026 if (kEmitCompilerReadBarrier) {
5027 if (kUseBakerReadBarrier) {
5028 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
 5029      // Baker's read barriers are used:
5030 //
5031 // root = obj.field;
5032 // if (Thread::Current()->GetIsGcMarking()) {
5033 // root = ReadBarrier::Mark(root)
5034 // }
5035
5036 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005037 if (fixup_label == nullptr) {
5038 __ Ldr(root_reg, MemOperand(obj, offset));
5039 } else {
5040 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
5041 __ Bind(fixup_label);
5042 __ ldr(root_reg, MemOperand(obj, offset));
5043 }
Roland Levillain44015862016-01-22 11:47:17 +00005044 static_assert(
5045 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5046 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5047 "have different sizes.");
5048 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5049 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5050 "have different sizes.");
5051
5052 // Slow path used to mark the GC root `root`.
5053 SlowPathCodeARM64* slow_path =
5054 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root, root);
5055 codegen_->AddSlowPath(slow_path);
5056
5057 MacroAssembler* masm = GetVIXLAssembler();
5058 UseScratchRegisterScope temps(masm);
5059 Register temp = temps.AcquireW();
5060 // temp = Thread::Current()->GetIsGcMarking()
5061 __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
5062 __ Cbnz(temp, slow_path->GetEntryLabel());
5063 __ Bind(slow_path->GetExitLabel());
5064 } else {
5065 // GC root loaded through a slow path for read barriers other
5066 // than Baker's.
5067 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005068 if (fixup_label == nullptr) {
5069 __ Add(root_reg.X(), obj.X(), offset);
5070 } else {
5071 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
5072 __ Bind(fixup_label);
5073 __ add(root_reg.X(), obj.X(), offset);
5074 }
Roland Levillain44015862016-01-22 11:47:17 +00005075 // /* mirror::Object* */ root = root->Read()
5076 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5077 }
5078 } else {
5079 // Plain GC root load with no read barrier.
5080 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005081 if (fixup_label == nullptr) {
5082 __ Ldr(root_reg, MemOperand(obj, offset));
5083 } else {
5084 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
5085 __ Bind(fixup_label);
5086 __ ldr(root_reg, MemOperand(obj, offset));
5087 }
Roland Levillain44015862016-01-22 11:47:17 +00005088 // Note that GC roots are not affected by heap poisoning, thus we
5089 // do not have to unpoison `root_reg` here.
5090 }
5091}
5092
5093void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5094 Location ref,
5095 vixl::Register obj,
5096 uint32_t offset,
5097 Register temp,
5098 bool needs_null_check,
5099 bool use_load_acquire) {
5100 DCHECK(kEmitCompilerReadBarrier);
5101 DCHECK(kUseBakerReadBarrier);
5102
5103 // /* HeapReference<Object> */ ref = *(obj + offset)
5104 Location no_index = Location::NoLocation();
5105 GenerateReferenceLoadWithBakerReadBarrier(
5106 instruction, ref, obj, offset, no_index, temp, needs_null_check, use_load_acquire);
5107}
5108
5109void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5110 Location ref,
5111 vixl::Register obj,
5112 uint32_t data_offset,
5113 Location index,
5114 Register temp,
5115 bool needs_null_check) {
5116 DCHECK(kEmitCompilerReadBarrier);
5117 DCHECK(kUseBakerReadBarrier);
5118
5119 // Array cells are never volatile variables, therefore array loads
5120 // never use Load-Acquire instructions on ARM64.
5121 const bool use_load_acquire = false;
5122
5123 // /* HeapReference<Object> */ ref =
5124 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5125 GenerateReferenceLoadWithBakerReadBarrier(
5126 instruction, ref, obj, data_offset, index, temp, needs_null_check, use_load_acquire);
5127}
5128
5129void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5130 Location ref,
5131 vixl::Register obj,
5132 uint32_t offset,
5133 Location index,
5134 Register temp,
5135 bool needs_null_check,
5136 bool use_load_acquire) {
5137 DCHECK(kEmitCompilerReadBarrier);
5138 DCHECK(kUseBakerReadBarrier);
5139 // If `index` is a valid location, then we are emitting an array
5140 // load, so we shouldn't be using a Load Acquire instruction.
5141 // In other words: `index.IsValid()` => `!use_load_acquire`.
5142 DCHECK(!index.IsValid() || !use_load_acquire);
5143
5144 MacroAssembler* masm = GetVIXLAssembler();
5145 UseScratchRegisterScope temps(masm);
5146
5147 // In slow path based read barriers, the read barrier call is
5148 // inserted after the original load. However, in fast path based
5149 // Baker's read barriers, we need to perform the load of
5150 // mirror::Object::monitor_ *before* the original reference load.
5151 // This load-load ordering is required by the read barrier.
5152 // The fast path/slow path (for Baker's algorithm) should look like:
5153 //
 5154  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
5155 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5156 // HeapReference<Object> ref = *src; // Original reference load.
5157 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
5158 // if (is_gray) {
5159 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5160 // }
5161 //
5162 // Note: the original implementation in ReadBarrier::Barrier is
5163 // slightly more complex as it performs additional checks that we do
5164 // not do here for performance reasons.
5165
5166 Primitive::Type type = Primitive::kPrimNot;
5167 Register ref_reg = RegisterFrom(ref, type);
5168 DCHECK(obj.IsW());
5169 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5170
5171 // /* int32_t */ monitor = obj->monitor_
5172 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5173 if (needs_null_check) {
5174 MaybeRecordImplicitNullCheck(instruction);
5175 }
5176 // /* LockWord */ lock_word = LockWord(monitor)
5177 static_assert(sizeof(LockWord) == sizeof(int32_t),
5178 "art::LockWord and int32_t have different sizes.");
5179 // /* uint32_t */ rb_state = lock_word.ReadBarrierState()
5180 __ Lsr(temp, temp, LockWord::kReadBarrierStateShift);
5181 __ And(temp, temp, Operand(LockWord::kReadBarrierStateMask));
5182 static_assert(
5183 LockWord::kReadBarrierStateMask == ReadBarrier::rb_ptr_mask_,
5184 "art::LockWord::kReadBarrierStateMask is not equal to art::ReadBarrier::rb_ptr_mask_.");
5185
5186 // Introduce a dependency on the high bits of rb_state, which shall
5187 // be all zeroes, to prevent load-load reordering, and without using
5188 // a memory barrier (which would be more expensive).
5189 // temp2 = rb_state & ~LockWord::kReadBarrierStateMask = 0
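  // Shape of the dependency chain this creates (registers are placeholders):
  //   ldr  w16, [x_obj, #monitor_offset]          // monitor word
  //   lsr  w16, w16, #shift; and w16, w16, #mask  // rb_state
  //   bic  w17, w16, #mask                        // w17 == 0, but data-dependent on w16
  //   add  w_obj, w_obj, w17                      // obj unchanged, now depends on w16
  //   ldr  w_ref, [x_obj, #offset]                // reference load, ordered after the monitor load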
5190 Register temp2 = temps.AcquireW();
5191 __ Bic(temp2, temp, Operand(LockWord::kReadBarrierStateMask));
5192 // obj is unchanged by this operation, but its value now depends on
5193 // temp2, which depends on temp.
5194 __ Add(obj, obj, Operand(temp2));
5195 temps.Release(temp2);
5196
5197 // The actual reference load.
5198 if (index.IsValid()) {
5199 static_assert(
5200 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5201 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005202 // /* HeapReference<Object> */ ref =
5203 // *(obj + offset + index * sizeof(HeapReference<Object>))
Roland Levillainca0bf032016-02-09 12:49:18 +00005204 const size_t shift_amount = Primitive::ComponentSizeShift(type);
Roland Levillain44015862016-01-22 11:47:17 +00005205 if (index.IsConstant()) {
Roland Levillainca0bf032016-02-09 12:49:18 +00005206 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << shift_amount);
5207 Load(type, ref_reg, HeapOperand(obj, computed_offset));
Roland Levillain44015862016-01-22 11:47:17 +00005208 } else {
Roland Levillainca0bf032016-02-09 12:49:18 +00005209 temp2 = temps.AcquireW();
Roland Levillain44015862016-01-22 11:47:17 +00005210 __ Add(temp2, obj, offset);
Roland Levillainca0bf032016-02-09 12:49:18 +00005211 Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, shift_amount));
5212 temps.Release(temp2);
Roland Levillain44015862016-01-22 11:47:17 +00005213 }
Roland Levillain44015862016-01-22 11:47:17 +00005214 } else {
5215 // /* HeapReference<Object> */ ref = *(obj + offset)
5216 MemOperand field = HeapOperand(obj, offset);
5217 if (use_load_acquire) {
5218 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
5219 } else {
5220 Load(type, ref_reg, field);
5221 }
5222 }
5223
5224 // Object* ref = ref_addr->AsMirrorPtr()
5225 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
5226
5227 // Slow path used to mark the object `ref` when it is gray.
5228 SlowPathCodeARM64* slow_path =
5229 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref, ref);
5230 AddSlowPath(slow_path);
5231
5232 // if (rb_state == ReadBarrier::gray_ptr_)
5233 // ref = ReadBarrier::Mark(ref);
5234 __ Cmp(temp, ReadBarrier::gray_ptr_);
5235 __ B(eq, slow_path->GetEntryLabel());
5236 __ Bind(slow_path->GetExitLabel());
5237}
5238
5239void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5240 Location out,
5241 Location ref,
5242 Location obj,
5243 uint32_t offset,
5244 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005245 DCHECK(kEmitCompilerReadBarrier);
5246
Roland Levillain44015862016-01-22 11:47:17 +00005247 // Insert a slow path based read barrier *after* the reference load.
5248 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005249 // If heap poisoning is enabled, the unpoisoning of the loaded
5250 // reference will be carried out by the runtime within the slow
5251 // path.
5252 //
5253 // Note that `ref` currently does not get unpoisoned (when heap
5254 // poisoning is enabled), which is alright as the `ref` argument is
5255 // not used by the artReadBarrierSlow entry point.
5256 //
5257 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5258 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5259 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5260 AddSlowPath(slow_path);
5261
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005262 __ B(slow_path->GetEntryLabel());
5263 __ Bind(slow_path->GetExitLabel());
5264}
5265
Roland Levillain44015862016-01-22 11:47:17 +00005266void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5267 Location out,
5268 Location ref,
5269 Location obj,
5270 uint32_t offset,
5271 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005272 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005273 // Baker's read barriers shall be handled by the fast path
5274 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5275 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005276 // If heap poisoning is enabled, unpoisoning will be taken care of
5277 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005278 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005279 } else if (kPoisonHeapReferences) {
5280 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5281 }
5282}
5283
Roland Levillain44015862016-01-22 11:47:17 +00005284void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5285 Location out,
5286 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005287 DCHECK(kEmitCompilerReadBarrier);
5288
Roland Levillain44015862016-01-22 11:47:17 +00005289 // Insert a slow path based read barrier *after* the GC root load.
5290 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005291 // Note that GC roots are not affected by heap poisoning, so we do
5292 // not need to do anything special for this here.
5293 SlowPathCodeARM64* slow_path =
5294 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5295 AddSlowPath(slow_path);
5296
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005297 __ B(slow_path->GetEntryLabel());
5298 __ Bind(slow_path->GetExitLabel());
5299}
5300
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005301void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5302 LocationSummary* locations =
5303 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5304 locations->SetInAt(0, Location::RequiresRegister());
5305 locations->SetOut(Location::RequiresRegister());
5306}
5307
5308void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5309 LocationSummary* locations = instruction->GetLocations();
5310 uint32_t method_offset = 0;
Vladimir Markoa1de9182016-02-25 11:37:38 +00005311 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005312 method_offset = mirror::Class::EmbeddedVTableEntryOffset(
5313 instruction->GetIndex(), kArm64PointerSize).SizeValue();
5314 } else {
Nelli Kimbadee982016-05-13 13:08:53 +03005315 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
5316 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
5317 method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity50706432016-06-14 11:31:04 -07005318 instruction->GetIndex(), kArm64PointerSize));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005319 }
5320 __ Ldr(XRegisterFrom(locations->Out()),
5321 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
5322}
5323
5324
5325
Alexandre Rames67555f72014-11-18 10:55:16 +00005326#undef __
5327#undef QUICK_ENTRY_POINT
5328
Alexandre Rames5319def2014-10-23 10:03:10 +01005329} // namespace arm64
5330} // namespace art