/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputFPRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputOperandAt;
using helpers::Int64ConstantFrom;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data when num_entries is small.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
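// For a sense of scale (approximate, using the estimate above): at exactly 7 entries the
// compare/jump sequence costs about 1.5 * 7 + 3 ~= 14 instructions, while a jump table costs
// 7 instructions plus 7 * 4 = 28 bytes of in-code literal data; below the threshold the
// compare/jump sequence is the smaller of the two.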

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
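// A concrete illustration (based on the A64 flag encoding, where an unordered `fcmp` result
// sets NZCV to 0011): for kCondLT with gt_bias, the branch must not be taken on NaN, so `cc`
// (C == 0) is returned and unordered (C == 1) falls through; without gt_bias, `lt` (N != V)
// is returned, which is taken on unordered, matching the /* unordered */ annotations above.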

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on the __ macro to suppress a spurious clang-tidy warning/fix.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()

// Calculate the memory-accessing operands for saving/restoring live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           RegisterSet* register_set,
                                           int64_t spill_offset,
                                           bool is_save) {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(register_set->GetCoreRegisters(),
                                         codegen->GetNumberOfCoreRegisters(),
                                         register_set->GetFloatingPointRegisters(),
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize,
      register_set->GetCoreRegisters() & (~callee_saved_core_registers.list()));
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize,
      register_set->GetFloatingPointRegisters() & (~callee_saved_fp_registers.list()));

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.TotalSizeInBytes();
  int64_t fp_spill_size = fp_list.TotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.Count() > 1) || (fp_list.Count() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate
    // register to compute the base address (the spill base address for the FP registers).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
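// Background on the constraint checked above (a sketch of the encoding, not taken from this
// file): STP/LDP of X or D registers encode a signed 7-bit immediate scaled by the access
// size, so pair offsets must be multiples of 8 in roughly [-512, 504]. A deep frame, say
// spill_offset = 600, fails IsImmLSPair(), which is why the helper rebases onto a scratch
// register so the FP spills start at offset -core_spill_size from the new base.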

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
    if (!codegen->IsCoreCalleeSaveRegister(i) && register_set->ContainsCoreRegister(i)) {
      // If the register holds an object, update the stack mask.
      if (locations->RegisterContainsObject(i)) {
        locations->SetStackBit(stack_offset / kVRegSize);
      }
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_core_stack_offsets_[i] = stack_offset;
      stack_offset += kXRegSizeInBytes;
    }
  }

  for (size_t i = 0, e = codegen->GetNumberOfFloatingPointRegisters(); i < e; ++i) {
    if (!codegen->IsFloatingPointCalleeSaveRegister(i) &&
        register_set->ContainsFloatingPointRegister(i)) {
      DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
      DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
      saved_fpu_stack_offsets_[i] = stack_offset;
      stack_offset += kDRegSizeInBytes;
    }
  }

  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  RegisterSet* register_set = locations->GetLiveRegisters();
  SaveRestoreLiveRegistersHelper(codegen, register_set,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    uint32_t entry_point_offset = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? QUICK_ENTRY_POINT(pThrowStringBounds)
        : QUICK_ENTRY_POINT(pThrowArrayBounds);
    arm64_codegen->InvokeRuntime(entry_point_offset, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
    int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
                                            : QUICK_ENTRY_POINT(pInitializeType);
    arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location class_to_check = locations->InAt(1);
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check, LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimNot,
        object_class, LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(
          QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
                           const mirror::Class*, const mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, instruction_->GetLocations());
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pDeoptimize),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};
557
Zheng Xu3927c8b2015-11-18 17:46:25 +0800558void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
559 uint32_t num_entries = switch_instr_->GetNumEntries();
Vladimir Markof3e0ee22015-12-17 15:23:13 +0000560 DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);
Zheng Xu3927c8b2015-11-18 17:46:25 +0800561
562 // We are about to use the assembler to place literals directly. Make sure we have enough
563 // underlying code buffer and we have generated the jump table with right size.
564 CodeBufferCheckScope scope(codegen->GetVIXLAssembler(), num_entries * sizeof(int32_t),
565 CodeBufferCheckScope::kCheck, CodeBufferCheckScope::kExactSize);
566
567 __ Bind(&table_start_);
568 const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
569 for (uint32_t i = 0; i < num_entries; i++) {
570 vixl::Label* target_label = codegen->GetLabelOf(successors[i]);
571 DCHECK(target_label->IsBound());
572 ptrdiff_t jump_offset = target_label->location() - table_start_.location();
573 DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
574 DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
575 Literal<int32_t> literal(jump_offset);
576 __ place(&literal);
577 }
578}
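// For context, the dispatch code that consumes this table (emitted at the switch itself,
// later in this file) conceptually looks like the following sketch; the register names are
// placeholders, not the actual temps used:
//
//   __ Adr(table_base, jump_table->GetTableStartLabel());
//   __ Ldrsw(offset, MemOperand(table_base, index, UXTW, 2));  // Load the int32 entry.
//   __ Add(target, table_base, Operand(offset, SXTW));         // Entries are table-relative.
//   __ Br(target);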
579
Roland Levillain44015862016-01-22 11:47:17 +0000580// Slow path marking an object during a read barrier.
581class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
582 public:
Roland Levillain02b75802016-07-13 11:54:35 +0100583 ReadBarrierMarkSlowPathARM64(HInstruction* instruction, Location obj)
584 : SlowPathCodeARM64(instruction), obj_(obj) {
Roland Levillain44015862016-01-22 11:47:17 +0000585 DCHECK(kEmitCompilerReadBarrier);
586 }
587
588 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }
589
590 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
591 LocationSummary* locations = instruction_->GetLocations();
Roland Levillain44015862016-01-22 11:47:17 +0000592 DCHECK(locations->CanCall());
Roland Levillain02b75802016-07-13 11:54:35 +0100593 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(obj_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +0000594 DCHECK(instruction_->IsInstanceFieldGet() ||
595 instruction_->IsStaticFieldGet() ||
596 instruction_->IsArrayGet() ||
597 instruction_->IsLoadClass() ||
598 instruction_->IsLoadString() ||
599 instruction_->IsInstanceOf() ||
Roland Levillain3d312422016-06-23 13:53:42 +0100600 instruction_->IsCheckCast() ||
601 ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
602 instruction_->GetLocations()->Intrinsified()))
Roland Levillain44015862016-01-22 11:47:17 +0000603 << "Unexpected instruction in read barrier marking slow path: "
604 << instruction_->DebugName();
605
606 __ Bind(GetEntryLabel());
Roland Levillain02b75802016-07-13 11:54:35 +0100607 // Save live registers before the runtime call, and in particular
608 // W0 (if it is live), as it is clobbered by functions
609 // art_quick_read_barrier_mark_regX.
Roland Levillain44015862016-01-22 11:47:17 +0000610 SaveLiveRegisters(codegen, locations);
611
612 InvokeRuntimeCallingConvention calling_convention;
613 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Roland Levillain02b75802016-07-13 11:54:35 +0100614 DCHECK_NE(obj_.reg(), LR);
615 DCHECK_NE(obj_.reg(), WSP);
616 DCHECK_NE(obj_.reg(), WZR);
617 DCHECK(0 <= obj_.reg() && obj_.reg() < kNumberOfWRegisters) << obj_.reg();
618 // "Compact" slow path, saving two moves.
619 //
620 // Instead of using the standard runtime calling convention (input
621 // and output in W0):
622 //
623 // W0 <- obj
624 // W0 <- ReadBarrierMark(W0)
625 // obj <- W0
626 //
627 // we just use rX (the register holding `obj`) as input and output
628 // of a dedicated entrypoint:
629 //
630 // rX <- ReadBarrierMarkRegX(rX)
631 //
632 int32_t entry_point_offset =
633 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64WordSize>(obj_.reg());
634 // TODO: Do not emit a stack map for this runtime call.
635 arm64_codegen->InvokeRuntime(entry_point_offset,
Roland Levillain44015862016-01-22 11:47:17 +0000636 instruction_,
637 instruction_->GetDexPc(),
638 this);
Roland Levillain44015862016-01-22 11:47:17 +0000639
640 RestoreLiveRegisters(codegen, locations);
641 __ B(GetExitLabel());
642 }
643
644 private:
Roland Levillain44015862016-01-22 11:47:17 +0000645 const Location obj_;
646
647 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
648};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           ((instruction_->IsInvokeStaticOrDirect() || instruction_->IsInvokeVirtual()) &&
            instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsArm64IntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).code());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).code());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`Primitive::kPrimNot`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pReadBarrierForRootSlow),
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
  return next_location;
}
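// A worked example of the logic above, assuming the managed calling convention used here
// passes core arguments in x1-x7 and FP arguments in d0-d7 (x0 carries the ArtMethod*): for
// a signature (int, float, long, double), the arguments land in w1, s0, x2 and d1, while
// stack_index_ advances by 1, 1, 2 and 2 vreg slots, since stack space is reserved for every
// argument even when it is passed in a register.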

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const Arm64InstructionSetFeatures& isa_features,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.list(),
                    callee_saved_fp_registers.list(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();
  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);
}
979
Zheng Xuad4450e2015-04-17 18:48:56 +0800980void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
981 // Note: There are 6 kinds of moves:
982 // 1. constant -> GPR/FPR (non-cycle)
983 // 2. constant -> stack (non-cycle)
984 // 3. GPR/FPR -> GPR/FPR
985 // 4. GPR/FPR -> stack
986 // 5. stack -> GPR/FPR
987 // 6. stack -> stack (non-cycle)
988 // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
989 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
990 // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
991 // dependency.
992 vixl_temps_.Open(GetVIXLAssembler());
993}
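// As an illustrative scenario (not code from this file): the swap cycle {x1 -> x2, x2 -> x1}
// is a kind-3 cycle and is resolved through one scratch register (tmp <- x1; x1 <- x2;
// x2 <- tmp), while a cycle involving a stack slot (kinds 4/5) consumes at most one more GPR
// for the memory access, which is why the 2-GPR/1-FPR temp budget above is sufficient.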

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
         kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
  kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK(kind == Location::kFpuRegister);
    scratch = LocationFrom(vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
    __ Ldr(wzr, MemOperand(temp, 0));
    RecordPcInfo(nullptr, 0);
  }

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.
    __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());
  }
}
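// A schematic of the prologue emitted above (offsets, the probe temp, and the callee-save
// set vary per method; this example assumes frame_size == 64 with only x20 and lr preserved):
//
//   sub x16, sp, #<reserved>   // Implicit stack-overflow check: probe below sp
//   ldr wzr, [x16]             //   (only when the frame or method requires it).
//   str x0, [sp, #-64]!        // Allocate the frame and store the ArtMethod* at sp[0].
//   stp x20, lr, [sp, #48]     // Spill preserved core registers at the top of the frame.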

void CodeGeneratorARM64::GenerateFrameExit() {
  BlockPoolsScope block_pools(GetVIXLAssembler());
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return vixl::CPURegList(vixl::CPURegister::kRegister, vixl::kXRegSize,
                          core_spill_mask_);
}

vixl::CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return vixl::CPURegList(vixl::CPURegister::kFPRegister, vixl::kDRegSize,
                          fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
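
// Numeric sketch of the card marking above (addresses assumed): with
// 128-byte cards (CardTable::kCardShift == 7), a store into an object at
// 0x12345680 dirties the byte at card_table_base + (0x12345680 >> 7).
// The single `strb card, [card, temp]` works because the biased card table
// base is chosen so that its least significant byte equals the dirty card
// value, letting the base register double as the value to store.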

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //      lr        : Runtime reserved.
  //      tr        : Runtime reserved.
  //      xSuspend  : Runtime reserved. TODO: Unblock this when the runtime stops using it.
  //      ip1       : VIXL core temp.
  //      ip0       : VIXL core temp.
  //
  // Blocked fp registers:
  //      d31       : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().code()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
         (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
         (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
         (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
}

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a
        // 64bit move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimLong
            : Primitive::kPrimInt;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        Primitive::Type source_type = Primitive::Is64BitType(dst_type)
            ? Primitive::kPrimDouble
            : Primitive::kPrimFloat;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsIntConstant() || src_cst->IsNullConstant()) {
        temp = temps.AcquireW();
      } else if (src_cst->IsLongConstant()) {
        temp = temps.AcquireX();
      } else if (src_cst->IsFloatConstant()) {
        temp = temps.AcquireS();
      } else {
        DCHECK(src_cst->IsDoubleConstant());
        temp = temps.AcquireD();
      }
      MoveConstant(temp, src_cst);
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // There is generally less pressure on FP registers.
      FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}
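
// Examples of the type inference above (locations assumed for illustration):
//   register w0 <- stack slot       : dst_type becomes kPrimInt, emits `ldr w0, [sp, #off]`.
//   core x0    <- FP d0 (kPrimLong) : emits `fmov x0, d0` via a kPrimDouble source type.
//   stack slot <- stack slot        : stages through an FP scratch (S or D temp),
//                                     since FP registers are under less pressure.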

void CodeGeneratorARM64::Load(Primitive::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldrb(Register(dst), src);
      break;
    case Primitive::kPrimByte:
      __ Ldrsb(Register(dst), src);
      break;
    case Primitive::kPrimShort:
      __ Ldrsh(Register(dst), src);
      break;
    case Primitive::kPrimChar:
      __ Ldrh(Register(dst), src);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  BlockPoolsScope block_pools(masm);
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  Primitive::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.base(), OperandFromMemOperand(src));
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimByte:
      __ Ldarb(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimChar:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimShort:
      __ Ldarh(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
      __ Ldar(Register(dst), base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(dst.IsFPRegister());
      DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Ldar(temp, base);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      __ Fmov(FPRegister(dst), temp);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
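
// Example expansion for a volatile signed byte load at offset 16 (registers
// assumed): Ldar* take no offset and no signed-byte acquire form exists, so
//   add   ip0, x1, #16         // Materialize the address in the X temp.
//   ldarb w0, [ip0]            // Load-acquire, zero-extended.
//   sbfx  w0, w0, #0, #8       // Re-establish the sign of the byte.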

void CodeGeneratorARM64::Store(Primitive::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Strb(Register(src), dst);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Strh(Register(src), dst);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Str(src, dst);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
                                      CPURegister src,
                                      const MemOperand& dst) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.base(), op);
  MemOperand base = MemOperand(temp_base);
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      __ Stlrb(Register(src), base);
      break;
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      __ Stlrh(Register(src), base);
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
      __ Stlr(Register(src), base);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      DCHECK(src.IsFPRegister());
      DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));

      Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
      __ Fmov(temp, FPRegister(src));
      __ Stlr(temp, base);
      break;
    }
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
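
// Example expansion for a volatile double store (registers assumed): Stlr
// has no FP form, so the value is round-tripped through a core temp:
//   add  ip0, x1, #offset
//   fmov ip1, d0
//   stlr ip1, [ip0]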

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  InvokeRuntime(GetThreadOffset<kArm64WordSize>(entrypoint).Int32Value(),
                instruction,
                dex_pc,
                slow_path);
}

void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(instruction, slow_path);
  BlockPoolsScope block_pools(GetVIXLAssembler());
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
  RecordPcInfo(instruction, dex_pc, slow_path);
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     vixl::Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  size_t status_offset = mirror::Class::StatusOffset().SizeValue();

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_offset);
  __ Ldar(temp, HeapOperand(temp));
  __ Cmp(temp, mirror::Class::kStatusInitialized);
  __ B(lt, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
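
// Sketch of the emitted check (temp register and offset assumed):
//   add  w16, w_class, #status_offset
//   ldar w16, [x16]                    // Acquire: pairs with the releasing
//                                      // store of the initialized status.
//   cmp  w16, #kStatusInitialized
//   b.lt <slow path>                   // Lower states mean "not initialized".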

void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}
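
// Resulting mapping onto AArch64 barriers, for reference:
//   kAnyAny, kAnyStore -> dmb ish    (full barrier; kAnyStore is
//                                     conservatively widened)
//   kLoadAny           -> dmb ishld  (orders prior loads)
//   kStoreStore        -> dmb ishst  (orders prior stores)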

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
      codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
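
// Typical expansion when `successor` is null, i.e. the check falls through
// to the next instruction (registers assumed):
//   ldrh w16, [tr, #thread_flags_offset]
//   cbnz w16, <suspend slow path>      // Any pending flag takes the slow path.
//  <return label>: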

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  /* No unimplemented IR. */

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
  // Using a base helps identify when we hit such breakpoints.
  UnimplementedInstructionBreakCodeBaseCode = 0x900,
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) {  \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_field_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  Primitive::Type field_type = field_info.GetFieldType();
  BlockPoolsScope block_pools(GetVIXLAssembler());
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object FieldGet with Baker's read barrier case.
    MacroAssembler* masm = GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
    Register temp = temps.AcquireW();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      codegen_->Load(field_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (field_type == Primitive::kPrimNot) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  BlockPoolsScope block_pools(GetVIXLAssembler());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    } else {
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.immediate() & (lhs.SizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in the same size register as the
          // result. If we are rotating a long and the shift distance comes in
          // a W register originally, we don't need to sxtw it for use as an X,
          // since the hardware masks the distance with (reg_bits - 1) anyway.
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
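
// Worked example for the register-rotate path above (values assumed): for a
// long rotate `ror x0, x1, x2`, the hardware only consumes x2 & 63, which is
// exactly Java's `distance & 0x3f`, so passing the W-sized distance register
// as an X register without extension is safe.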

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.immediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
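
// Worked example for the immediate path (values assumed): an int shift
// `x >> 35` is masked to 35 & kMaxIntShiftDistance == 35 & 31 == 3, so the
// emitted instruction is `asr w0, w1, #3`, matching Java shift semantics.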

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}
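
// These map merged "op with negated operand" patterns onto single
// instructions (illustrative):
//   a & ~b -> bic dst, a, b
//   a | ~b -> orn dst, a, b
//   a ^ ~b -> eon dst, a, b
// saving the separate `mvn` that a naive expansion would need.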

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it was passed by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}
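
// Illustrative merge (IR assumed): for `a + (b << 5)` the arm64 simplifier
// folds the shift into this instruction, turning
//   lsl x16, x1, #5
//   add x0, x2, x16
// into the single `add x0, x2, x1, lsl #5`; extensions (sxtw, uxth, ...)
// fold the same way for add/sub via the extended-register operand form.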

void LocationsBuilderARM64::VisitArm64IntermediateAddress(HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitArm64IntermediateAddress(
    HArm64IntermediateAddress* instruction) {
  // The read barrier instrumentation does not support the
  // HArm64IntermediateAddress instruction yet.
  DCHECK(!kEmitCompilerReadBarrier);
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate a register for the accumulator: the Mneg instruction does not need one.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::Instruction* prev = masm->GetCursorAddress<vixl::Instruction*>() - vixl::kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit exactly one nop.
      vixl::CodeBufferCheckScope scope(masm,
                                       vixl::kInstructionSize,
                                       vixl::CodeBufferCheckScope::kCheck,
                                       vixl::CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}
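
// Erratum 835769 sketch: on affected Cortex-A53 parts a 64-bit multiply-
// accumulate immediately following a memory access can compute a wrong
// result. The fixup above turns, e.g.,
//   ldr  x3, [sp, #16]
//   madd x0, x1, x2, x3
// into
//   ldr  x3, [sp, #16]
//   nop
//   madd x0, x1, x2, x3
// Only the instruction directly before the cursor needs checking, since
// only an immediately preceding load or store triggers the erratum.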
2061
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002062void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002063 bool object_array_get_with_read_barrier =
2064 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002065 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002066 new (GetGraph()->GetArena()) LocationSummary(instruction,
2067 object_array_get_with_read_barrier ?
2068 LocationSummary::kCallOnSlowPath :
2069 LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002070 locations->SetInAt(0, Location::RequiresRegister());
2071 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002072 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2073 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2074 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002075 // The output overlaps in the case of an object array get with
2076 // read barriers enabled: we do not want the move to overwrite the
2077 // array's location, as we need it to emit the read barrier.
2078 locations->SetOut(
2079 Location::RequiresRegister(),
2080 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002081 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002082}
2083
2084void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002085 Primitive::Type type = instruction->GetType();
2086 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002087 LocationSummary* locations = instruction->GetLocations();
2088 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002089 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002090 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002091
Alexandre Ramesd921d642015-04-16 15:07:16 +01002092 MacroAssembler* masm = GetVIXLAssembler();
2093 UseScratchRegisterScope temps(masm);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002094 // Block pools between `Load` and `MaybeRecordImplicitNullCheck`.
Alexandre Ramesd921d642015-04-16 15:07:16 +01002095 BlockPoolsScope block_pools(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002096
Roland Levillain44015862016-01-22 11:47:17 +00002097 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2098 // Object ArrayGet with Baker's read barrier case.
2099 Register temp = temps.AcquireW();
2100 // The read barrier instrumentation does not support the
2101 // HArm64IntermediateAddress instruction yet.
2102 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
2103 // Note that a potential implicit null check is handled in the
2104 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2105 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2106 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002107 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002108 // General case.
2109 MemOperand source = HeapOperand(obj);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002110 if (index.IsConstant()) {
Roland Levillain44015862016-01-22 11:47:17 +00002111 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2112 source = HeapOperand(obj, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002113 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002114 Register temp = temps.AcquireSameSizeAs(obj);
2115 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
2116 // The read barrier instrumentation does not support the
2117 // HArm64IntermediateAddress instruction yet.
2118 DCHECK(!kEmitCompilerReadBarrier);
2119 // We do not need to compute the intermediate address from the array: the
2120 // input instruction has done it already. See the comment in
2121 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2122 if (kIsDebugBuild) {
2123 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2124 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2125 }
2126 temp = obj;
2127 } else {
2128 __ Add(temp, obj, offset);
2129 }
2130 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2131 }
2132
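    // Here `source` addresses obj + data offset + index * component size (in
    // the constant-index case everything was folded into `offset` above). The
    // load below may fault on a null `obj`; pools are blocked (see
    // `block_pools`) so that `MaybeRecordImplicitNullCheck` records the PC of
    // the load itself.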
2133 codegen_->Load(type, OutputCPURegister(instruction), source);
2134 codegen_->MaybeRecordImplicitNullCheck(instruction);
2135
2136 if (type == Primitive::kPrimNot) {
2137 static_assert(
2138 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2139 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2140 Location obj_loc = locations->InAt(0);
2141 if (index.IsConstant()) {
2142 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2143 } else {
2144 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2145 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002146 }
Roland Levillain4d027112015-07-01 15:41:14 +01002147 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002148}
2149
Alexandre Rames5319def2014-10-23 10:03:10 +01002150void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2151 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2152 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002153 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002154}
2155
2156void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002157 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexandre Ramesd921d642015-04-16 15:07:16 +01002158 BlockPoolsScope block_pools(GetVIXLAssembler());
Vladimir Markodce016e2016-04-28 13:10:02 +01002159 __ Ldr(OutputRegister(instruction), HeapOperand(InputRegisterAt(instruction, 0), offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00002160 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002161}
2162
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002163void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002164 Primitive::Type value_type = instruction->GetComponentType();
2165
2166 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2167 bool object_array_set_with_read_barrier =
2168 kEmitCompilerReadBarrier && (value_type == Primitive::kPrimNot);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002169 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2170 instruction,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002171 (may_need_runtime_call_for_type_check || object_array_set_with_read_barrier) ?
2172 LocationSummary::kCallOnSlowPath :
2173 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002174 locations->SetInAt(0, Location::RequiresRegister());
2175 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002176 if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002177 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002178 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002179 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002180 }
2181}
2182
2183void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2184 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002185 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002186 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002187 bool needs_write_barrier =
2188 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002189
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002190 Register array = InputRegisterAt(instruction, 0);
2191 CPURegister value = InputCPURegisterAt(instruction, 2);
2192 CPURegister source = value;
2193 Location index = locations->InAt(1);
2194 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2195 MemOperand destination = HeapOperand(array);
2196 MacroAssembler* masm = GetVIXLAssembler();
2197 BlockPoolsScope block_pools(masm);
2198
2199 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002200 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002201 if (index.IsConstant()) {
2202 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2203 destination = HeapOperand(array, offset);
2204 } else {
2205 UseScratchRegisterScope temps(masm);
2206 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002207 if (instruction->GetArray()->IsArm64IntermediateAddress()) {
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00002208 // The read barrier instrumentation does not support the
2209 // HArm64IntermediateAddress instruction yet.
2210 DCHECK(!kEmitCompilerReadBarrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002211 // We do not need to compute the intermediate address from the array: the
2212 // input instruction has done it already. See the comment in
2213 // `InstructionSimplifierArm64::TryExtractArrayAccessAddress()`.
2214 if (kIsDebugBuild) {
2215 HArm64IntermediateAddress* tmp = instruction->GetArray()->AsArm64IntermediateAddress();
2216          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2217 }
2218 temp = array;
2219 } else {
2220 __ Add(temp, array, offset);
2221 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002222 destination = HeapOperand(temp,
2223 XRegisterFrom(index),
2224 LSL,
2225 Primitive::ComponentSizeShift(value_type));
2226 }
2227 codegen_->Store(value_type, value, destination);
2228 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002229 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002230 DCHECK(needs_write_barrier);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002231 DCHECK(!instruction->GetArray()->IsArm64IntermediateAddress());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002232 vixl::Label done;
2233 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002234 {
2235 // We use a block to end the scratch scope before the write barrier, thus
2236 // freeing the temporary registers so they can be used in `MarkGCCard`.
2237 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002238 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002239 if (index.IsConstant()) {
2240 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002241 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002242 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002243 destination = HeapOperand(temp,
2244 XRegisterFrom(index),
2245 LSL,
2246 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002247 }
2248
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002249 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2250 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2251 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2252
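      // Fast-path type check: compare value->klass_ against
      // array->klass_->component_type_. On a mismatch, an array whose static
      // type is Object[] may still accept the store if its runtime component
      // type is java.lang.Object (recognized by a null super_class_); any
      // other mismatch branches to the slow path.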
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002253 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002254 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2255 codegen_->AddSlowPath(slow_path);
2256 if (instruction->GetValueCanBeNull()) {
2257 vixl::Label non_zero;
2258 __ Cbnz(Register(value), &non_zero);
2259 if (!index.IsConstant()) {
2260 __ Add(temp, array, offset);
2261 }
2262 __ Str(wzr, destination);
2263 codegen_->MaybeRecordImplicitNullCheck(instruction);
2264 __ B(&done);
2265 __ Bind(&non_zero);
2266 }
2267
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002268 if (kEmitCompilerReadBarrier) {
2269 // When read barriers are enabled, the type checking
2270 // instrumentation requires two read barriers:
2271 //
2272 // __ Mov(temp2, temp);
2273 // // /* HeapReference<Class> */ temp = temp->component_type_
2274 // __ Ldr(temp, HeapOperand(temp, component_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002275 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002276 // instruction, temp_loc, temp_loc, temp2_loc, component_offset);
2277 //
2278 // // /* HeapReference<Class> */ temp2 = value->klass_
2279 // __ Ldr(temp2, HeapOperand(Register(value), class_offset));
Roland Levillain44015862016-01-22 11:47:17 +00002280 // codegen_->GenerateReadBarrierSlow(
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002281 // instruction, temp2_loc, temp2_loc, value_loc, class_offset, temp_loc);
2282 //
2283 // __ Cmp(temp, temp2);
2284 //
2285 // However, the second read barrier may trash `temp`, as it
2286 // is a temporary register, and as such would not be saved
2287 // along with live registers before calling the runtime (nor
2288 // restored afterwards). So in this case, we bail out and
2289 // delegate the work to the array set slow path.
2290 //
2291 // TODO: Extend the register allocator to support a new
2292 // "(locally) live temp" location so as to avoid always
2293 // going into the slow path when read barriers are enabled.
2294 __ B(slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002295 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002296 Register temp2 = temps.AcquireSameSizeAs(array);
2297 // /* HeapReference<Class> */ temp = array->klass_
2298 __ Ldr(temp, HeapOperand(array, class_offset));
2299 codegen_->MaybeRecordImplicitNullCheck(instruction);
2300 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2301
2302 // /* HeapReference<Class> */ temp = temp->component_type_
2303 __ Ldr(temp, HeapOperand(temp, component_offset));
2304 // /* HeapReference<Class> */ temp2 = value->klass_
2305 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2306 // If heap poisoning is enabled, no need to unpoison `temp`
2307 // nor `temp2`, as we are comparing two poisoned references.
2308 __ Cmp(temp, temp2);
2309
2310 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2311 vixl::Label do_put;
2312 __ B(eq, &do_put);
2313 // If heap poisoning is enabled, the `temp` reference has
2314 // not been unpoisoned yet; unpoison it now.
2315 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2316
2317 // /* HeapReference<Class> */ temp = temp->super_class_
2318 __ Ldr(temp, HeapOperand(temp, super_offset));
2319 // If heap poisoning is enabled, no need to unpoison
2320 // `temp`, as we are comparing against null below.
2321 __ Cbnz(temp, slow_path->GetEntryLabel());
2322 __ Bind(&do_put);
2323 } else {
2324 __ B(ne, slow_path->GetEntryLabel());
2325 }
2326 temps.Release(temp2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002327 }
2328 }
2329
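    // With heap poisoning enabled, references are stored in poisoned form.
    // Poison a scratch copy rather than `value` itself: the unpoisoned value
    // is still needed below for the card marking in `MarkGCCard`.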
2330 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002331 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002332 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002333 __ Mov(temp2, value.W());
2334 GetAssembler()->PoisonHeapReference(temp2);
2335 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002336 }
2337
2338 if (!index.IsConstant()) {
2339 __ Add(temp, array, offset);
2340 }
Nicolas Geoffray61b1dbe2015-10-01 10:27:52 +01002341 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002342
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002343 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002344 codegen_->MaybeRecordImplicitNullCheck(instruction);
2345 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002346 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002347
2348 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2349
2350 if (done.IsLinked()) {
2351 __ Bind(&done);
2352 }
2353
2354 if (slow_path != nullptr) {
2355 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002356 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002357 }
2358}
2359
Alexandre Rames67555f72014-11-18 10:55:16 +00002360void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002361 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2362 ? LocationSummary::kCallOnSlowPath
2363 : LocationSummary::kNoCall;
2364 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002365 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002366 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002367 if (instruction->HasUses()) {
2368 locations->SetOut(Location::SameAsFirstInput());
2369 }
2370}
2371
2372void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002373 BoundsCheckSlowPathARM64* slow_path =
2374 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002375 codegen_->AddSlowPath(slow_path);
2376
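  // A single unsigned comparison covers both failure modes: a negative index
  // wraps to a large unsigned value, so the `hs` (unsigned >=) branch is
  // taken both when index < 0 and when index >= length.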
2377 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2378 __ B(slow_path->GetEntryLabel(), hs);
2379}
2380
Alexandre Rames67555f72014-11-18 10:55:16 +00002381void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2382 LocationSummary* locations =
2383 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2384 locations->SetInAt(0, Location::RequiresRegister());
2385 if (check->HasUses()) {
2386 locations->SetOut(Location::SameAsFirstInput());
2387 }
2388}
2389
2390void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2391 // We assume the class is not null.
2392 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2393 check->GetLoadClass(), check, check->GetDexPc(), true);
2394 codegen_->AddSlowPath(slow_path);
2395 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2396}
2397
Roland Levillain1a653882016-03-18 18:05:57 +00002398static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2399 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2400 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2401}
2402
2403void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2404 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2405 Location rhs_loc = instruction->GetLocations()->InAt(1);
2406 if (rhs_loc.IsConstant()) {
2407 // 0.0 is the only immediate that can be encoded directly in
2408 // an FCMP instruction.
2409 //
2410 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2411 // specify that in a floating-point comparison, positive zero
2412 // and negative zero are considered equal, so we can use the
2413 // literal 0.0 for both cases here.
2414 //
2415    // Note however that some methods (Float.equals, Float.compare,
2416    // Float.compareTo, Double.equals, Double.compare,
2417 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2418 // StrictMath.min) consider 0.0 to be (strictly) greater than
2419 // -0.0. So if we ever translate calls to these methods into a
2420 // HCompare instruction, we must handle the -0.0 case with
2421 // care here.
2422 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2423 __ Fcmp(lhs_reg, 0.0);
2424 } else {
2425 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2426 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002427}
2428
Serban Constantinescu02164b32014-11-13 14:05:07 +00002429void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002430 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002431 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2432 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002433 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002434 case Primitive::kPrimBoolean:
2435 case Primitive::kPrimByte:
2436 case Primitive::kPrimShort:
2437 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002438 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002439 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002440 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002441 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002442 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2443 break;
2444 }
2445 case Primitive::kPrimFloat:
2446 case Primitive::kPrimDouble: {
2447 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002448 locations->SetInAt(1,
2449 IsFloatingPointZeroConstant(compare->InputAt(1))
2450 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2451 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002452 locations->SetOut(Location::RequiresRegister());
2453 break;
2454 }
2455 default:
2456 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2457 }
2458}
2459
2460void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2461 Primitive::Type in_type = compare->InputAt(0)->GetType();
2462
2463 // 0 if: left == right
2464 // 1 if: left > right
2465 // -1 if: left < right
2466 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002467 case Primitive::kPrimBoolean:
2468 case Primitive::kPrimByte:
2469 case Primitive::kPrimShort:
2470 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002471 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002472 case Primitive::kPrimLong: {
2473 Register result = OutputRegister(compare);
2474 Register left = InputRegisterAt(compare, 0);
2475 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002476 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002477 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2478 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
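      // For example: left == right leaves both ne and lt false, so result is 0;
      // left < right sets result to 1 and the Cneg flips it to -1;
      // left > right sets result to 1 and the Cneg leaves it unchanged.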
Serban Constantinescu02164b32014-11-13 14:05:07 +00002479 break;
2480 }
2481 case Primitive::kPrimFloat:
2482 case Primitive::kPrimDouble: {
2483 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002484 GenerateFcmp(compare);
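      // A NaN input makes the comparison unordered, which leaves `ne` true, so
      // `result` starts at 1; ARM64FPCondition then picks the condition for the
      // Cneg so that NaN yields -1 for lt-bias compares and +1 for gt-bias ones.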
Vladimir Markod6e069b2016-01-18 11:11:01 +00002485 __ Cset(result, ne);
2486 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002487 break;
2488 }
2489 default:
2490 LOG(FATAL) << "Unimplemented compare type " << in_type;
2491 }
2492}
2493
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002494void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002495 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002496
2497 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2498 locations->SetInAt(0, Location::RequiresFpuRegister());
2499 locations->SetInAt(1,
2500 IsFloatingPointZeroConstant(instruction->InputAt(1))
2501 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2502 : Location::RequiresFpuRegister());
2503 } else {
2504 // Integer cases.
2505 locations->SetInAt(0, Location::RequiresRegister());
2506 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2507 }
2508
David Brazdilb3e773e2016-01-26 11:28:37 +00002509 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002510 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002511 }
2512}
2513
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002514void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002515 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002516 return;
2517 }
2518
2519 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002520 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002521 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002522
Roland Levillain7f63c522015-07-13 15:54:55 +00002523 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002524 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002525 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002526 } else {
2527 // Integer cases.
2528 Register lhs = InputRegisterAt(instruction, 0);
2529 Operand rhs = InputOperandAt(instruction, 1);
2530 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002531 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002532 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002533}
2534
2535#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2536 M(Equal) \
2537 M(NotEqual) \
2538 M(LessThan) \
2539 M(LessThanOrEqual) \
2540 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002541 M(GreaterThanOrEqual) \
2542 M(Below) \
2543 M(BelowOrEqual) \
2544 M(Above) \
2545 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002546#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002547void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2548void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002549FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002550#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002551#undef FOR_EACH_CONDITION_INSTRUCTION
2552
Zheng Xuc6667102015-05-15 16:08:45 +08002553void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2554 DCHECK(instruction->IsDiv() || instruction->IsRem());
2555
2556 LocationSummary* locations = instruction->GetLocations();
2557 Location second = locations->InAt(1);
2558 DCHECK(second.IsConstant());
2559
2560 Register out = OutputRegister(instruction);
2561 Register dividend = InputRegisterAt(instruction, 0);
2562 int64_t imm = Int64FromConstant(second.GetConstant());
2563 DCHECK(imm == 1 || imm == -1);
2564
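  // No division instruction is needed: x % 1 and x % -1 are always 0,
  // x / 1 is x, and x / -1 is -x. The negation wraps on the most negative
  // value, matching Java semantics for division overflow.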
2565 if (instruction->IsRem()) {
2566 __ Mov(out, 0);
2567 } else {
2568 if (imm == 1) {
2569 __ Mov(out, dividend);
2570 } else {
2571 __ Neg(out, dividend);
2572 }
2573 }
2574}
2575
2576void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2577 DCHECK(instruction->IsDiv() || instruction->IsRem());
2578
2579 LocationSummary* locations = instruction->GetLocations();
2580 Location second = locations->InAt(1);
2581 DCHECK(second.IsConstant());
2582
2583 Register out = OutputRegister(instruction);
2584 Register dividend = InputRegisterAt(instruction, 0);
2585 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002586 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002587 int ctz_imm = CTZ(abs_imm);
2588
2589 UseScratchRegisterScope temps(GetVIXLAssembler());
2590 Register temp = temps.AcquireSameSizeAs(out);
2591
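  // Division must round toward zero, but an arithmetic shift rounds toward
  // negative infinity, so negative dividends are biased by abs_imm - 1 first;
  // e.g. -7 / 4: (-7 + 3) >> 2 == -1. The remainder path applies the same
  // bias, masks with abs_imm - 1, then removes the bias, giving a result with
  // the sign of the dividend as Java's % requires (e.g. -7 % 4 == -3).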
2592 if (instruction->IsDiv()) {
2593 __ Add(temp, dividend, abs_imm - 1);
2594 __ Cmp(dividend, 0);
2595 __ Csel(out, temp, dividend, lt);
2596 if (imm > 0) {
2597 __ Asr(out, out, ctz_imm);
2598 } else {
2599 __ Neg(out, Operand(out, ASR, ctz_imm));
2600 }
2601 } else {
2602 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
2603 __ Asr(temp, dividend, bits - 1);
2604 __ Lsr(temp, temp, bits - ctz_imm);
2605 __ Add(out, dividend, temp);
2606 __ And(out, out, abs_imm - 1);
2607 __ Sub(out, out, temp);
2608 }
2609}
2610
2611void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2612 DCHECK(instruction->IsDiv() || instruction->IsRem());
2613
2614 LocationSummary* locations = instruction->GetLocations();
2615 Location second = locations->InAt(1);
2616 DCHECK(second.IsConstant());
2617
2618 Register out = OutputRegister(instruction);
2619 Register dividend = InputRegisterAt(instruction, 0);
2620 int64_t imm = Int64FromConstant(second.GetConstant());
2621
2622 Primitive::Type type = instruction->GetResultType();
2623 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2624
2625 int64_t magic;
2626 int shift;
2627 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
2628
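  // Magic-number division (Hacker's Delight, sections 10-4/10-5): take the
  // high half of dividend * magic, correct by +/- dividend when magic and imm
  // have opposite signs, shift right by `shift`, then add 1 to negative
  // results to round toward zero. The remainder, if requested, is recovered
  // as dividend - quotient * imm with Msub.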
2629 UseScratchRegisterScope temps(GetVIXLAssembler());
2630 Register temp = temps.AcquireSameSizeAs(out);
2631
2632 // temp = get_high(dividend * magic)
2633 __ Mov(temp, magic);
2634 if (type == Primitive::kPrimLong) {
2635 __ Smulh(temp, dividend, temp);
2636 } else {
2637 __ Smull(temp.X(), dividend, temp);
2638 __ Lsr(temp.X(), temp.X(), 32);
2639 }
2640
2641 if (imm > 0 && magic < 0) {
2642 __ Add(temp, temp, dividend);
2643 } else if (imm < 0 && magic > 0) {
2644 __ Sub(temp, temp, dividend);
2645 }
2646
2647 if (shift != 0) {
2648 __ Asr(temp, temp, shift);
2649 }
2650
2651 if (instruction->IsDiv()) {
2652 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2653 } else {
2654 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
2655 // TODO: Strength reduction for msub.
2656 Register temp_imm = temps.AcquireSameSizeAs(out);
2657 __ Mov(temp_imm, imm);
2658 __ Msub(out, temp, temp_imm, dividend);
2659 }
2660}
2661
2662void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2663 DCHECK(instruction->IsDiv() || instruction->IsRem());
2664 Primitive::Type type = instruction->GetResultType();
2665  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2666
2667 LocationSummary* locations = instruction->GetLocations();
2668 Register out = OutputRegister(instruction);
2669 Location second = locations->InAt(1);
2670
2671 if (second.IsConstant()) {
2672 int64_t imm = Int64FromConstant(second.GetConstant());
2673
2674 if (imm == 0) {
2675      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2676 } else if (imm == 1 || imm == -1) {
2677 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002678 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08002679 DivRemByPowerOfTwo(instruction);
2680 } else {
2681 DCHECK(imm <= -2 || imm >= 2);
2682 GenerateDivRemWithAnyConstant(instruction);
2683 }
2684 } else {
2685 Register dividend = InputRegisterAt(instruction, 0);
2686 Register divisor = InputRegisterAt(instruction, 1);
2687 if (instruction->IsDiv()) {
2688 __ Sdiv(out, dividend, divisor);
2689 } else {
2690 UseScratchRegisterScope temps(GetVIXLAssembler());
2691 Register temp = temps.AcquireSameSizeAs(out);
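      // ARM64 has no integer remainder instruction, so compute
      // out = dividend - (dividend / divisor) * divisor with Sdiv + Msub.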
2692 __ Sdiv(temp, dividend, divisor);
2693 __ Msub(out, temp, divisor, dividend);
2694 }
2695 }
2696}
2697
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002698void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2699 LocationSummary* locations =
2700 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2701 switch (div->GetResultType()) {
2702 case Primitive::kPrimInt:
2703 case Primitive::kPrimLong:
2704 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002705 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002706 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2707 break;
2708
2709 case Primitive::kPrimFloat:
2710 case Primitive::kPrimDouble:
2711 locations->SetInAt(0, Location::RequiresFpuRegister());
2712 locations->SetInAt(1, Location::RequiresFpuRegister());
2713 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2714 break;
2715
2716 default:
2717 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2718 }
2719}
2720
2721void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
2722 Primitive::Type type = div->GetResultType();
2723 switch (type) {
2724 case Primitive::kPrimInt:
2725 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08002726 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002727 break;
2728
2729 case Primitive::kPrimFloat:
2730 case Primitive::kPrimDouble:
2731 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
2732 break;
2733
2734 default:
2735 LOG(FATAL) << "Unexpected div type " << type;
2736 }
2737}
2738
Alexandre Rames67555f72014-11-18 10:55:16 +00002739void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
David Brazdil77a48ae2015-09-15 12:34:04 +00002740 LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
2741 ? LocationSummary::kCallOnSlowPath
2742 : LocationSummary::kNoCall;
2743 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexandre Rames67555f72014-11-18 10:55:16 +00002744 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2745 if (instruction->HasUses()) {
2746 locations->SetOut(Location::SameAsFirstInput());
2747 }
2748}
2749
2750void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2751 SlowPathCodeARM64* slow_path =
2752 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
2753 codegen_->AddSlowPath(slow_path);
2754 Location value = instruction->GetLocations()->InAt(0);
2755
Alexandre Rames3e69f162014-12-10 10:36:50 +00002756 Primitive::Type type = instruction->GetType();
2757
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002758 if (!Primitive::IsIntegralType(type)) {
2759 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00002760 return;
2761 }
2762
Alexandre Rames67555f72014-11-18 10:55:16 +00002763 if (value.IsConstant()) {
2764 int64_t divisor = Int64ConstantFrom(value);
2765 if (divisor == 0) {
2766 __ B(slow_path->GetEntryLabel());
2767 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00002768      // A division by a non-zero constant is valid. We don't need to perform
2769 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00002770 }
2771 } else {
2772 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
2773 }
2774}
2775
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002776void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
2777 LocationSummary* locations =
2778 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2779 locations->SetOut(Location::ConstantLocation(constant));
2780}
2781
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002782void InstructionCodeGeneratorARM64::VisitDoubleConstant(
2783 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002784 // Will be generated at use site.
2785}
2786
Alexandre Rames5319def2014-10-23 10:03:10 +01002787void LocationsBuilderARM64::VisitExit(HExit* exit) {
2788 exit->SetLocations(nullptr);
2789}
2790
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002791void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002792}
2793
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002794void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
2795 LocationSummary* locations =
2796 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2797 locations->SetOut(Location::ConstantLocation(constant));
2798}
2799
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002800void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002801 // Will be generated at use site.
2802}
2803
David Brazdilfc6a86a2015-06-26 10:33:45 +00002804void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002805 DCHECK(!successor->IsExitBlock());
2806 HBasicBlock* block = got->GetBlock();
2807 HInstruction* previous = got->GetPrevious();
2808 HLoopInformation* info = block->GetLoopInformation();
2809
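  // A goto that is a loop back edge emits the loop's suspend check instead,
  // so the thread can be suspended (e.g. for GC) once per iteration; the
  // suspend check code also performs the jump to the successor.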
David Brazdil46e2a392015-03-16 17:31:52 +00002810 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002811 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2812 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2813 return;
2814 }
2815 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2816 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2817 }
2818 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002819 __ B(codegen_->GetLabelOf(successor));
2820 }
2821}
2822
David Brazdilfc6a86a2015-06-26 10:33:45 +00002823void LocationsBuilderARM64::VisitGoto(HGoto* got) {
2824 got->SetLocations(nullptr);
2825}
2826
2827void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
2828 HandleGoto(got, got->GetSuccessor());
2829}
2830
2831void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2832 try_boundary->SetLocations(nullptr);
2833}
2834
2835void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
2836 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2837 if (!successor->IsExitBlock()) {
2838 HandleGoto(try_boundary, successor);
2839 }
2840}
2841
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002842void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00002843 size_t condition_input_index,
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002844 vixl::Label* true_target,
David Brazdil0debae72015-11-12 18:37:00 +00002845 vixl::Label* false_target) {
2846 // FP branching requires both targets to be explicit. If either of the targets
2847 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
2848 vixl::Label fallthrough_target;
2849 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002850
David Brazdil0debae72015-11-12 18:37:00 +00002851 if (true_target == nullptr && false_target == nullptr) {
2852 // Nothing to do. The code always falls through.
2853 return;
2854 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00002855 // Constant condition, statically compared against "true" (integer value 1).
2856 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00002857 if (true_target != nullptr) {
2858 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002859 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002860 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00002861 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00002862 if (false_target != nullptr) {
2863 __ B(false_target);
2864 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00002865 }
David Brazdil0debae72015-11-12 18:37:00 +00002866 return;
2867 }
2868
2869 // The following code generates these patterns:
2870 // (1) true_target == nullptr && false_target != nullptr
2871 // - opposite condition true => branch to false_target
2872 // (2) true_target != nullptr && false_target == nullptr
2873 // - condition true => branch to true_target
2874 // (3) true_target != nullptr && false_target != nullptr
2875 // - condition true => branch to true_target
2876 // - branch to false_target
2877 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002878 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00002879 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01002880 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00002881 if (true_target == nullptr) {
2882 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
2883 } else {
2884 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
2885 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002886 } else {
2887 // The condition instruction has not been materialized, use its inputs as
2888 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00002889 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00002890
David Brazdil0debae72015-11-12 18:37:00 +00002891 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00002892 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00002893 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00002894 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002895 IfCondition opposite_condition = condition->GetOppositeCondition();
2896 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00002897 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00002898 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00002899 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002900 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00002901 // Integer cases.
2902 Register lhs = InputRegisterAt(condition, 0);
2903 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00002904
2905 Condition arm64_cond;
2906 vixl::Label* non_fallthrough_target;
2907 if (true_target == nullptr) {
2908 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
2909 non_fallthrough_target = false_target;
2910 } else {
2911 arm64_cond = ARM64Condition(condition->GetCondition());
2912 non_fallthrough_target = true_target;
2913 }
2914
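      // Comparisons against zero need no Cmp: eq/ne map to Cbz/Cbnz, and
      // lt/ge reduce to testing the sign bit with Tbnz/Tbz.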
Aart Bik086d27e2016-01-20 17:02:00 -08002915 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
2916 rhs.IsImmediate() && (rhs.immediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002917 switch (arm64_cond) {
2918 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00002919 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002920 break;
2921 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00002922 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002923 break;
2924 case lt:
2925 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002926 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002927 break;
2928 case ge:
2929 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00002930 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002931 break;
2932 default:
2933 // Without the `static_cast` the compiler throws an error for
2934 // `-Werror=sign-promo`.
2935 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
2936 }
2937 } else {
2938 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00002939 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00002940 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002941 }
2942 }
David Brazdil0debae72015-11-12 18:37:00 +00002943
2944 // If neither branch falls through (case 3), the conditional branch to `true_target`
2945 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2946 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002947 __ B(false_target);
2948 }
David Brazdil0debae72015-11-12 18:37:00 +00002949
2950 if (fallthrough_target.IsLinked()) {
2951 __ Bind(&fallthrough_target);
2952 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002953}
2954
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002955void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
2956 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00002957 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002958 locations->SetInAt(0, Location::RequiresRegister());
2959 }
2960}
2961
2962void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002963 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2964 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2965 vixl::Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2966 nullptr : codegen_->GetLabelOf(true_successor);
2967 vixl::Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2968 nullptr : codegen_->GetLabelOf(false_successor);
2969 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002970}
2971
2972void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
2973 LocationSummary* locations = new (GetGraph()->GetArena())
2974 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
David Brazdil0debae72015-11-12 18:37:00 +00002975 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002976 locations->SetInAt(0, Location::RequiresRegister());
2977 }
2978}
2979
2980void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08002981 SlowPathCodeARM64* slow_path =
2982 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00002983 GenerateTestAndBranch(deoptimize,
2984 /* condition_input_index */ 0,
2985 slow_path->GetEntryLabel(),
2986 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07002987}
2988
David Brazdilc0b601b2016-02-08 14:20:45 +00002989static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
2990 return condition->IsCondition() &&
2991 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
2992}
2993
Alexandre Rames880f1192016-06-13 16:04:50 +01002994static inline Condition GetConditionForSelect(HCondition* condition) {
2995 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00002996 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
2997 : ARM64Condition(cond);
2998}
2999
David Brazdil74eb1b22015-12-14 11:44:01 +00003000void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3001 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01003002 if (Primitive::IsFloatingPointType(select->GetType())) {
3003 locations->SetInAt(0, Location::RequiresFpuRegister());
3004 locations->SetInAt(1, Location::RequiresFpuRegister());
3005 locations->SetOut(Location::RequiresFpuRegister());
3006 } else {
3007 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3008 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3009 bool is_true_value_constant = cst_true_value != nullptr;
3010 bool is_false_value_constant = cst_false_value != nullptr;
3011 // Ask VIXL whether we should synthesize constants in registers.
3012 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3013 Operand true_op = is_true_value_constant ?
3014 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3015 Operand false_op = is_false_value_constant ?
3016 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3017 bool true_value_in_register = false;
3018 bool false_value_in_register = false;
3019 MacroAssembler::GetCselSynthesisInformation(
3020 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3021 true_value_in_register |= !is_true_value_constant;
3022 false_value_in_register |= !is_false_value_constant;
3023
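    // Constants that Csel and its variants (Csinc, Csinv, Csneg) can
    // synthesize directly (such as 0, 1 or -1) keep a constant location;
    // anything else must be materialized in a register.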
3024 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3025 : Location::ConstantLocation(cst_true_value));
3026 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3027 : Location::ConstantLocation(cst_false_value));
3028 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003029 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003030
David Brazdil74eb1b22015-12-14 11:44:01 +00003031 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3032 locations->SetInAt(2, Location::RequiresRegister());
3033 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003034}
3035
3036void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003037 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003038 Condition csel_cond;
3039
3040 if (IsBooleanValueOrMaterializedCondition(cond)) {
3041 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003042 // Use the condition flags set by the previous instruction.
3043 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003044 } else {
3045 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003046 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003047 }
3048 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003049 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003050 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003051 } else {
3052 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003053 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003054 }
3055
Alexandre Rames880f1192016-06-13 16:04:50 +01003056 if (Primitive::IsFloatingPointType(select->GetType())) {
3057 __ Fcsel(OutputFPRegister(select),
3058 InputFPRegisterAt(select, 1),
3059 InputFPRegisterAt(select, 0),
3060 csel_cond);
3061 } else {
3062 __ Csel(OutputRegister(select),
3063 InputOperandAt(select, 1),
3064 InputOperandAt(select, 0),
3065 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003066 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003067}
3068
David Srbecky0cf44932015-12-09 14:09:59 +00003069void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3070 new (GetGraph()->GetArena()) LocationSummary(info);
3071}
3072
David Srbeckyd28f4a02016-03-14 17:14:24 +00003073void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3074 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003075}
3076
3077void CodeGeneratorARM64::GenerateNop() {
3078 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003079}
3080
Alexandre Rames5319def2014-10-23 10:03:10 +01003081void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003082 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003083}
3084
3085void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003086 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003087}
3088
3089void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003090 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003091}
3092
3093void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003094 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003095}
3096
Roland Levillain44015862016-01-22 11:47:17 +00003097static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
3098 return kEmitCompilerReadBarrier &&
3099 (kUseBakerReadBarrier ||
3100 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3101 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3102 type_check_kind == TypeCheckKind::kArrayObjectCheck);
3103}
3104
Alexandre Rames67555f72014-11-18 10:55:16 +00003105void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003106 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003107 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3108 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003109 case TypeCheckKind::kExactCheck:
3110 case TypeCheckKind::kAbstractClassCheck:
3111 case TypeCheckKind::kClassHierarchyCheck:
3112 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003113 call_kind =
3114 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003115 break;
3116 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003117 case TypeCheckKind::kUnresolvedCheck:
3118 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003119 call_kind = LocationSummary::kCallOnSlowPath;
3120 break;
3121 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003122
Alexandre Rames67555f72014-11-18 10:55:16 +00003123 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003124 locations->SetInAt(0, Location::RequiresRegister());
3125 locations->SetInAt(1, Location::RequiresRegister());
3126 // The "out" register is used as a temporary, so it overlaps with the inputs.
3127 // Note that TypeCheckSlowPathARM64 uses this register too.
3128 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3129 // When read barriers are enabled, we need a temporary register for
3130 // some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003131 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003132 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003133 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003134}
3135
3136void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003137 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003138 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003139 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003140 Register obj = InputRegisterAt(instruction, 0);
3141 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003142 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003143 Register out = OutputRegister(instruction);
Roland Levillain44015862016-01-22 11:47:17 +00003144 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3145 locations->GetTemp(0) :
3146 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003147 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3148 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3149 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3150 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003151
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003152 vixl::Label done, zero;
3153 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003154
3155 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003156 // Avoid null check if we know `obj` is not null.
3157 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003158 __ Cbz(obj, &zero);
3159 }
3160
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003161 // /* HeapReference<Class> */ out = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003162 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003163
Roland Levillain44015862016-01-22 11:47:17 +00003164 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003165 case TypeCheckKind::kExactCheck: {
3166 __ Cmp(out, cls);
3167 __ Cset(out, eq);
3168 if (zero.IsLinked()) {
3169 __ B(&done);
3170 }
3171 break;
3172 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003173
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003174 case TypeCheckKind::kAbstractClassCheck: {
3175 // If the class is abstract, we eagerly fetch the super class of the
3176 // object to avoid doing a comparison we know will fail.
3177 vixl::Label loop, success;
3178 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003179 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003180 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003181 // If `out` is null, we use it for the result, and jump to `done`.
3182 __ Cbz(out, &done);
3183 __ Cmp(out, cls);
3184 __ B(ne, &loop);
3185 __ Mov(out, 1);
3186 if (zero.IsLinked()) {
3187 __ B(&done);
3188 }
3189 break;
3190 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003191
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003192 case TypeCheckKind::kClassHierarchyCheck: {
3193 // Walk over the class hierarchy to find a match.
3194 vixl::Label loop, success;
3195 __ Bind(&loop);
3196 __ Cmp(out, cls);
3197 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003198 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003199 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003200 __ Cbnz(out, &loop);
3201 // If `out` is null, we use it for the result, and jump to `done`.
3202 __ B(&done);
3203 __ Bind(&success);
3204 __ Mov(out, 1);
3205 if (zero.IsLinked()) {
3206 __ B(&done);
3207 }
3208 break;
3209 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003210
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003211 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003212 // Do an exact check.
3213 vixl::Label exact_check;
3214 __ Cmp(out, cls);
3215 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003216 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003217 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003218 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003219 // If `out` is null, we use it for the result, and jump to `done`.
3220 __ Cbz(out, &done);
3221 __ Ldrh(out, HeapOperand(out, primitive_offset));
3222 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3223 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003224 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003225 __ Mov(out, 1);
3226 __ B(&done);
3227 break;
3228 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003229
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003230 case TypeCheckKind::kArrayCheck: {
3231 __ Cmp(out, cls);
3232 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003233 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3234 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003235 codegen_->AddSlowPath(slow_path);
3236 __ B(ne, slow_path->GetEntryLabel());
3237 __ Mov(out, 1);
3238 if (zero.IsLinked()) {
3239 __ B(&done);
3240 }
3241 break;
3242 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003243
Calin Juravle98893e12015-10-02 21:05:03 +01003244 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003245 case TypeCheckKind::kInterfaceCheck: {
3246 // Note that we indeed only call on slow path, but we always go
3247 // into the slow path for the unresolved and interface check
3248 // cases.
3249 //
3250 // We cannot directly call the InstanceofNonTrivial runtime
3251 // entry point without resorting to a type checking slow path
3252 // here (i.e. by calling InvokeRuntime directly), as it would
3253      // require us to assign fixed registers for the inputs of this
3254 // HInstanceOf instruction (following the runtime calling
3255 // convention), which might be cluttered by the potential first
3256 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003257 //
3258 // TODO: Introduce a new runtime entry point taking the object
3259 // to test (instead of its class) as argument, and let it deal
3260 // with the read barrier issues. This will let us refactor this
3261 // case of the `switch` code as it was previously (with a direct
3262 // call to the runtime not using a type checking slow path).
3263 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003264 DCHECK(locations->OnlyCallsOnSlowPath());
3265 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3266 /* is_fatal */ false);
3267 codegen_->AddSlowPath(slow_path);
3268 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003269 if (zero.IsLinked()) {
3270 __ B(&done);
3271 }
3272 break;
3273 }
3274 }
3275
3276 if (zero.IsLinked()) {
3277 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003278 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003279 }
3280
3281 if (done.IsLinked()) {
3282 __ Bind(&done);
3283 }
3284
3285 if (slow_path != nullptr) {
3286 __ Bind(slow_path->GetExitLabel());
3287 }
3288}
3289
3290void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3291 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3292 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3293
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003294 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3295 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003296 case TypeCheckKind::kExactCheck:
3297 case TypeCheckKind::kAbstractClassCheck:
3298 case TypeCheckKind::kClassHierarchyCheck:
3299 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003300 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3301 LocationSummary::kCallOnSlowPath :
3302 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003303 break;
3304 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003305 case TypeCheckKind::kUnresolvedCheck:
3306 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003307 call_kind = LocationSummary::kCallOnSlowPath;
3308 break;
3309 }
3310
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003311 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3312 locations->SetInAt(0, Location::RequiresRegister());
3313 locations->SetInAt(1, Location::RequiresRegister());
3314 // Note that TypeCheckSlowPathARM64 uses this "temp" register too.
3315 locations->AddTemp(Location::RequiresRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003316 // When read barriers are enabled, we need an additional temporary
3317 // register for some cases.
Roland Levillain44015862016-01-22 11:47:17 +00003318 if (TypeCheckNeedsATemporary(type_check_kind)) {
3319 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003320 }
3321}
3322
3323void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003324 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003325 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003326 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003327 Register obj = InputRegisterAt(instruction, 0);
3328 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003329 Location temp_loc = locations->GetTemp(0);
Roland Levillain44015862016-01-22 11:47:17 +00003330 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
3331 locations->GetTemp(1) :
3332 Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003333 Register temp = WRegisterFrom(temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003334 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3335 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3336 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3337 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003338
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003339 bool is_type_check_slow_path_fatal =
3340 (type_check_kind == TypeCheckKind::kExactCheck ||
3341 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3342 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3343 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3344 !instruction->CanThrowIntoCatchBlock();
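  // A fatal slow path throws and never returns to the compiled code, which is
  // why the locations builder above can treat these cases as kNoCall.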
3345 SlowPathCodeARM64* type_check_slow_path =
3346 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3347 is_type_check_slow_path_fatal);
3348 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003349
3350 vixl::Label done;
3351  // Avoid null check if we know `obj` is not null.
3352 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003353 __ Cbz(obj, &done);
3354 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003355
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003356 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003357 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Nicolas Geoffray75374372015-09-17 17:12:19 +00003358
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003359 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003360 case TypeCheckKind::kExactCheck:
3361 case TypeCheckKind::kArrayCheck: {
3362 __ Cmp(temp, cls);
3363 // Jump to slow path for throwing the exception or doing a
3364 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003365 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003366 break;
3367 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003368
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003369 case TypeCheckKind::kAbstractClassCheck: {
3370 // If the class is abstract, we eagerly fetch the super class of the
3371 // object to avoid doing a comparison we know will fail.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003372 vixl::Label loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003373 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003374 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003375 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003376
3377 // If the class reference currently in `temp` is not null, jump
3378 // to the `compare_classes` label to compare it with the checked
3379 // class.
3380 __ Cbnz(temp, &compare_classes);
3381 // Otherwise, jump to the slow path to throw the exception.
3382 //
3383 // But before, move back the object's class into `temp` before
3384 // going into the slow path, as it has been overwritten in the
3385 // meantime.
3386 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003387 GenerateReferenceLoadTwoRegisters(
3388 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003389 __ B(type_check_slow_path->GetEntryLabel());
3390
3391 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003392 __ Cmp(temp, cls);
3393 __ B(ne, &loop);
3394 break;
3395 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003396
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003397 case TypeCheckKind::kClassHierarchyCheck: {
3398 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003399 vixl::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003400 __ Bind(&loop);
3401 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003402 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003403
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003404 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain44015862016-01-22 11:47:17 +00003405 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003406
3407 // If the class reference currently in `temp` is not null, jump
3408      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003409 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003410 // Otherwise, jump to the slow path to throw the exception.
3411 //
3412 // But before, move back the object's class into `temp` before
3413 // going into the slow path, as it has been overwritten in the
3414 // meantime.
3415 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003416 GenerateReferenceLoadTwoRegisters(
3417 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003418 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003419 break;
3420 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003421
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003422 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003423 // Do an exact check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003424 vixl::Label check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003425 __ Cmp(temp, cls);
3426 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003427
3428 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003429 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain44015862016-01-22 11:47:17 +00003430 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003431
3432 // If the component type is not null (i.e. the object is indeed
3433 // an array), jump to label `check_non_primitive_component_type`
3434 // to further check that this component type is not a primitive
3435 // type.
3436 __ Cbnz(temp, &check_non_primitive_component_type);
3437 // Otherwise, jump to the slow path to throw the exception.
3438 //
3439 // But before, move back the object's class into `temp` before
3440 // going into the slow path, as it has been overwritten in the
3441 // meantime.
3442 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003443 GenerateReferenceLoadTwoRegisters(
3444 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003445 __ B(type_check_slow_path->GetEntryLabel());
3446
3447 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003448 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3449 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003450 __ Cbz(temp, &done);
3451 // Same comment as above regarding `temp` and the slow path.
3452 // /* HeapReference<Class> */ temp = obj->klass_
Roland Levillain44015862016-01-22 11:47:17 +00003453 GenerateReferenceLoadTwoRegisters(
3454 instruction, temp_loc, obj_loc, class_offset, maybe_temp2_loc);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003455 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003456 break;
3457 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003458
Calin Juravle98893e12015-10-02 21:05:03 +01003459 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003460 case TypeCheckKind::kInterfaceCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003461 // We always go into the type check slow path for the unresolved
3462 // and interface check cases.
3463 //
3464 // We cannot directly call the CheckCast runtime entry point
3465 // without resorting to a type checking slow path here (i.e. by
3466      // calling InvokeRuntime directly), as it would require us to
3467      // assign fixed registers for the inputs of this HCheckCast
3468 // instruction (following the runtime calling convention), which
3469 // might be cluttered by the potential first read barrier
3470 // emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003471 //
3472 // TODO: Introduce a new runtime entry point taking the object
3473 // to test (instead of its class) as argument, and let it deal
3474 // with the read barrier issues. This will let us refactor this
3475 // case of the `switch` code as it was previously (with a direct
3476 // call to the runtime not using a type checking slow path).
3477 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003478 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003479 break;
3480 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003481 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003482
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003483 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003484}
3485
Alexandre Rames5319def2014-10-23 10:03:10 +01003486void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3487 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3488 locations->SetOut(Location::ConstantLocation(constant));
3489}
3490
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003491void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003492 // Will be generated at use site.
3493}
3494
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003495void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3496 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3497 locations->SetOut(Location::ConstantLocation(constant));
3498}
3499
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003500void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003501 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003502}
3503
Calin Juravle175dc732015-08-25 15:42:32 +01003504void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3505 // The trampoline uses the same calling convention as dex calling conventions,
3506 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3507 // the method_idx.
3508 HandleInvoke(invoke);
3509}
3510
3511void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3512 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3513}
3514
Alexandre Rames5319def2014-10-23 10:03:10 +01003515void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003516 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003517 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003518}
3519
Alexandre Rames67555f72014-11-18 10:55:16 +00003520void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3521 HandleInvoke(invoke);
3522}
3523
3524void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3525 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003526 LocationSummary* locations = invoke->GetLocations();
3527 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003528 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003529 Offset class_offset = mirror::Object::ClassOffset();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003530 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003531
3532 // The register ip1 is required to be used for the hidden argument in
3533 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003534 MacroAssembler* masm = GetVIXLAssembler();
3535 UseScratchRegisterScope scratch_scope(masm);
3536 BlockPoolsScope block_pools(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003537 scratch_scope.Exclude(ip1);
3538 __ Mov(ip1, invoke->GetDexMethodIndex());
3539
Alexandre Rames67555f72014-11-18 10:55:16 +00003540 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003541 __ Ldr(temp.W(), StackOperandFrom(receiver));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003542 // /* HeapReference<Class> */ temp = temp->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003543 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003544 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003545 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003546 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003547 }
Calin Juravle77520bc2015-01-12 18:45:46 +00003548 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003549 // Instead of simply (possibly) unpoisoning `temp` here, we should
3550 // emit a read barrier for the previous class reference load.
3551 // However this is not required in practice, as this is an
3552 // intermediate/temporary reference and because the current
3553 // concurrent copying collector keeps the from-space memory
3554 // intact/accessible until the end of the marking phase (the
3555  // concurrent copying collector may not keep it accessible in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01003556 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003557 __ Ldr(temp,
3558 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
3559 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
3560 invoke->GetImtIndex() % ImTable::kSize, kArm64PointerSize));
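  // Illustrative arithmetic, assuming an IMT of 64 entries: an IMT index of 75
  // would select slot 75 % 64 == 11.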
Alexandre Rames67555f72014-11-18 10:55:16 +00003561 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07003562 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00003563 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07003564 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Alexandre Rames67555f72014-11-18 10:55:16 +00003565 // lr();
3566 __ Blr(lr);
3567 DCHECK(!codegen_->IsLeafMethod());
3568 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3569}
3570
3571void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003572 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3573 if (intrinsic.TryDispatch(invoke)) {
3574 return;
3575 }
3576
Alexandre Rames67555f72014-11-18 10:55:16 +00003577 HandleInvoke(invoke);
3578}
3579
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00003580void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003581 // Explicit clinit checks triggered by static invokes must have been pruned by
3582 // art::PrepareForRegisterAllocation.
3583 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003584
Andreas Gampe878d58c2015-01-15 23:24:00 -08003585 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
3586 if (intrinsic.TryDispatch(invoke)) {
3587 return;
3588 }
3589
Alexandre Rames67555f72014-11-18 10:55:16 +00003590 HandleInvoke(invoke);
3591}
3592
Andreas Gampe878d58c2015-01-15 23:24:00 -08003593static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
3594 if (invoke->GetLocations()->Intrinsified()) {
3595 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
3596 intrinsic.Dispatch(invoke);
3597 return true;
3598 }
3599 return false;
3600}
3601
Vladimir Markodc151b22015-10-15 18:02:30 +01003602HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
3603 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3604 MethodReference target_method ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00003605 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01003606 return desired_dispatch_info;
3607}
3608
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003609void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00003610 // For better instruction scheduling we load the direct code pointer before the method pointer.
3611 bool direct_code_loaded = false;
3612 switch (invoke->GetCodePtrLocation()) {
3613 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3614 // LR = code address from literal pool with link-time patch.
3615 __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3616 direct_code_loaded = true;
3617 break;
3618 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3619 // LR = invoke->GetDirectCodePtr();
3620 __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3621 direct_code_loaded = true;
3622 break;
3623 default:
3624 break;
3625 }
3626
Andreas Gampe878d58c2015-01-15 23:24:00 -08003627 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00003628 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
3629 switch (invoke->GetMethodLoadKind()) {
3630 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
3631 // temp = thread->string_init_entrypoint
Alexandre Rames6dc01742015-11-12 14:44:19 +00003632 __ Ldr(XRegisterFrom(temp), MemOperand(tr, invoke->GetStringInitOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003633 break;
3634 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003635 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003636 break;
3637 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
3638 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003639 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003640 break;
3641 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
3642 // Load method address from literal pool with a link-time patch.
Alexandre Rames6dc01742015-11-12 14:44:19 +00003643 __ Ldr(XRegisterFrom(temp),
Vladimir Marko58155012015-08-19 12:49:41 +00003644 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3645 break;
3646 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3647 // Add ADRP with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003648 const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
3649 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
3650 vixl::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003651 {
3652 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003653 __ Bind(adrp_label);
3654 __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0);
Vladimir Marko58155012015-08-19 12:49:41 +00003655 }
Vladimir Marko58155012015-08-19 12:49:41 +00003656 // Add LDR with its PC-relative DexCache access patch.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003657 vixl::Label* ldr_label =
3658 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Alexandre Rames6dc01742015-11-12 14:44:19 +00003659 {
3660 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003661 __ Bind(ldr_label);
3662 __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0));
Alexandre Rames6dc01742015-11-12 14:44:19 +00003663 }
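      // Sketch of the patched sequence (the linker fills in both immediates):
      //   adrp xTemp, <dex cache array element, page part>
      //   ldr  xTemp, [xTemp, #<element offset within the page>]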
Vladimir Marko58155012015-08-19 12:49:41 +00003664 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01003665 }
Vladimir Marko58155012015-08-19 12:49:41 +00003666 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003667 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003668 Register reg = XRegisterFrom(temp);
3669 Register method_reg;
3670 if (current_method.IsRegister()) {
3671 method_reg = XRegisterFrom(current_method);
3672 } else {
3673 DCHECK(invoke->GetLocations()->Intrinsified());
3674 DCHECK(!current_method.IsValid());
3675 method_reg = reg;
3676 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
3677 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00003678
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003679 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003680 __ Ldr(reg.X(),
3681 MemOperand(method_reg.X(),
3682 ArtMethod::DexCacheResolvedMethodsOffset(kArm64WordSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00003683 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01003684 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3685 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003686 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
3687 break;
3688 }
3689 }
3690
3691 switch (invoke->GetCodePtrLocation()) {
3692 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
3693 __ Bl(&frame_entry_label_);
3694 break;
3695 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3696 relative_call_patches_.emplace_back(invoke->GetTargetMethod());
3697 vixl::Label* label = &relative_call_patches_.back().label;
Alexandre Rames6dc01742015-11-12 14:44:19 +00003698 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
3699 __ Bind(label);
3700      __ bl(0);     // Branch and link to itself. This will be overridden at link time.
Vladimir Marko58155012015-08-19 12:49:41 +00003701 break;
3702 }
3703 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3704 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3705 // LR prepared above for better instruction scheduling.
3706 DCHECK(direct_code_loaded);
3707 // lr()
3708 __ Blr(lr);
3709 break;
3710 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3711 // LR = callee_method->entry_point_from_quick_compiled_code_;
3712 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00003713 XRegisterFrom(callee_method),
Vladimir Marko58155012015-08-19 12:49:41 +00003714 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize).Int32Value()));
3715 // lr()
3716 __ Blr(lr);
3717 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00003718 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003719
Andreas Gampe878d58c2015-01-15 23:24:00 -08003720 DCHECK(!IsLeafMethod());
3721}
3722
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003723void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003724 // Use the calling convention instead of the location of the receiver, as
3725 // intrinsics may have put the receiver in a different register. In the intrinsics
3726 // slow path, the arguments have been moved to the right place, so here we are
3727 // guaranteed that the receiver is the first register of the calling convention.
3728 InvokeDexCallingConvention calling_convention;
3729 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003730 Register temp = XRegisterFrom(temp_in);
3731 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3732 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
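  // method_offset addresses this virtual method's slot in the receiver
  // class's embedded vtable.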
3733 Offset class_offset = mirror::Object::ClassOffset();
3734 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
3735
3736 BlockPoolsScope block_pools(GetVIXLAssembler());
3737
3738 DCHECK(receiver.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003739 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003740 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003741 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003742 // Instead of simply (possibly) unpoisoning `temp` here, we should
3743 // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003744  // intermediate/temporary reference and because the current
3745 // concurrent copying collector keeps the from-space memory
3746 // intact/accessible until the end of the marking phase (the
3747  // concurrent copying collector may not keep it accessible in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003748 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
3749 // temp = temp->GetMethodAt(method_offset);
3750 __ Ldr(temp, MemOperand(temp, method_offset));
3751 // lr = temp->GetEntryPoint();
3752 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
3753 // lr();
3754 __ Blr(lr);
3755}
3756
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003757vixl::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(const DexFile& dex_file,
3758 uint32_t string_index,
3759 vixl::Label* adrp_label) {
3760 return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
3761}
3762
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003763vixl::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(const DexFile& dex_file,
3764 uint32_t type_index,
3765 vixl::Label* adrp_label) {
3766 return NewPcRelativePatch(dex_file, type_index, adrp_label, &pc_relative_type_patches_);
3767}
3768
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003769vixl::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
3770 uint32_t element_offset,
3771 vixl::Label* adrp_label) {
3772 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
3773}
3774
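// Shared helper for the NewPcRelative*Patch methods above. Typical usage: the
// first call (with adrp_label == nullptr) creates the ADRP patch; passing the
// returned label back creates the companion ADD/LDR patch, so that both
// entries share the same pc_insn_label.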
3775vixl::Label* CodeGeneratorARM64::NewPcRelativePatch(const DexFile& dex_file,
3776 uint32_t offset_or_index,
3777 vixl::Label* adrp_label,
3778 ArenaDeque<PcRelativePatchInfo>* patches) {
3779 // Add a patch entry and return the label.
3780 patches->emplace_back(dex_file, offset_or_index);
3781 PcRelativePatchInfo* info = &patches->back();
3782 vixl::Label* label = &info->label;
3783 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
3784 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
3785 return label;
3786}
3787
3788vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
3789 const DexFile& dex_file, uint32_t string_index) {
3790 return boot_image_string_patches_.GetOrCreate(
3791 StringReference(&dex_file, string_index),
3792 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3793}
3794
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003795vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
3796 const DexFile& dex_file, uint32_t type_index) {
3797 return boot_image_type_patches_.GetOrCreate(
3798 TypeReference(&dex_file, type_index),
3799 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
3800}
3801
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003802vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(uint64_t address) {
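  // When patch information is requested, keep the literal in a dedicated map
  // so that EmitLinkerPatches() below can record its position.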
3803 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
3804 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
3805 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
3806}
3807
3808vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddressLiteral(uint64_t address) {
3809 return DeduplicateUint64Literal(address);
3810}
3811
Vladimir Marko58155012015-08-19 12:49:41 +00003812void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
3813 DCHECK(linker_patches->empty());
3814 size_t size =
3815 method_patches_.size() +
3816 call_patches_.size() +
3817 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003818 pc_relative_dex_cache_patches_.size() +
3819 boot_image_string_patches_.size() +
3820 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003821 boot_image_type_patches_.size() +
3822 pc_relative_type_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003823 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00003824 linker_patches->reserve(size);
3825 for (const auto& entry : method_patches_) {
3826 const MethodReference& target_method = entry.first;
3827 vixl::Literal<uint64_t>* literal = entry.second;
3828 linker_patches->push_back(LinkerPatch::MethodPatch(literal->offset(),
3829 target_method.dex_file,
3830 target_method.dex_method_index));
3831 }
3832 for (const auto& entry : call_patches_) {
3833 const MethodReference& target_method = entry.first;
3834 vixl::Literal<uint64_t>* literal = entry.second;
3835 linker_patches->push_back(LinkerPatch::CodePatch(literal->offset(),
3836 target_method.dex_file,
3837 target_method.dex_method_index));
3838 }
3839 for (const MethodPatchInfo<vixl::Label>& info : relative_call_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003840 linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003841 info.target_method.dex_file,
3842 info.target_method.dex_method_index));
3843 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003844 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Alexandre Rames6dc01742015-11-12 14:44:19 +00003845 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.location(),
Vladimir Marko58155012015-08-19 12:49:41 +00003846 &info.target_dex_file,
Alexandre Rames6dc01742015-11-12 14:44:19 +00003847 info.pc_insn_label->location(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003848 info.offset_or_index));
3849 }
3850 for (const auto& entry : boot_image_string_patches_) {
3851 const StringReference& target_string = entry.first;
3852 vixl::Literal<uint32_t>* literal = entry.second;
3853 linker_patches->push_back(LinkerPatch::StringPatch(literal->offset(),
3854 target_string.dex_file,
3855 target_string.string_index));
3856 }
3857 for (const PcRelativePatchInfo& info : pc_relative_string_patches_) {
3858 linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.location(),
3859 &info.target_dex_file,
3860 info.pc_insn_label->location(),
3861 info.offset_or_index));
3862 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003863 for (const auto& entry : boot_image_type_patches_) {
3864 const TypeReference& target_type = entry.first;
3865 vixl::Literal<uint32_t>* literal = entry.second;
3866 linker_patches->push_back(LinkerPatch::TypePatch(literal->offset(),
3867 target_type.dex_file,
3868 target_type.type_index));
3869 }
3870 for (const PcRelativePatchInfo& info : pc_relative_type_patches_) {
3871 linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.location(),
3872 &info.target_dex_file,
3873 info.pc_insn_label->location(),
3874 info.offset_or_index));
3875 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003876 for (const auto& entry : boot_image_address_patches_) {
3877 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
3878 vixl::Literal<uint32_t>* literal = entry.second;
3879 linker_patches->push_back(LinkerPatch::RecordPosition(literal->offset()));
Vladimir Marko58155012015-08-19 12:49:41 +00003880 }
3881}
3882
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003883vixl::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
3884 Uint32ToLiteralMap* map) {
3885 return map->GetOrCreate(
3886 value,
3887 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
3888}
3889
Vladimir Marko58155012015-08-19 12:49:41 +00003890vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003891 return uint64_literals_.GetOrCreate(
3892 value,
3893 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00003894}
3895
3896vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
3897 MethodReference target_method,
3898 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003899 return map->GetOrCreate(
3900 target_method,
3901 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00003902}
3903
3904vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
3905 MethodReference target_method) {
3906 return DeduplicateMethodLiteral(target_method, &method_patches_);
3907}
3908
3909vixl::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
3910 MethodReference target_method) {
3911 return DeduplicateMethodLiteral(target_method, &call_patches_);
3912}
3913
3914
Andreas Gampe878d58c2015-01-15 23:24:00 -08003915void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003916 // Explicit clinit checks triggered by static invokes must have been pruned by
3917 // art::PrepareForRegisterAllocation.
3918 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01003919
Andreas Gampe878d58c2015-01-15 23:24:00 -08003920 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3921 return;
3922 }
3923
Alexandre Ramesd921d642015-04-16 15:07:16 +01003924 BlockPoolsScope block_pools(GetVIXLAssembler());
Nicolas Geoffray38207af2015-06-01 15:46:22 +01003925 LocationSummary* locations = invoke->GetLocations();
3926 codegen_->GenerateStaticOrDirectCall(
3927 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00003928 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01003929}
3930
3931void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08003932 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3933 return;
3934 }
3935
Andreas Gampebfb5ba92015-09-01 15:45:02 +00003936 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01003937 DCHECK(!codegen_->IsLeafMethod());
3938 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3939}
3940
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003941HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
3942 HLoadClass::LoadKind desired_class_load_kind) {
3943 if (kEmitCompilerReadBarrier) {
3944 switch (desired_class_load_kind) {
3945 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3946 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3947 case HLoadClass::LoadKind::kBootImageAddress:
3948 // TODO: Implement for read barrier.
3949 return HLoadClass::LoadKind::kDexCacheViaMethod;
3950 default:
3951 break;
3952 }
3953 }
3954 switch (desired_class_load_kind) {
3955 case HLoadClass::LoadKind::kReferrersClass:
3956 break;
3957 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3958 DCHECK(!GetCompilerOptions().GetCompilePic());
3959 break;
3960 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3961 DCHECK(GetCompilerOptions().GetCompilePic());
3962 break;
3963 case HLoadClass::LoadKind::kBootImageAddress:
3964 break;
3965 case HLoadClass::LoadKind::kDexCacheAddress:
3966 DCHECK(Runtime::Current()->UseJitCompilation());
3967 break;
3968 case HLoadClass::LoadKind::kDexCachePcRelative:
3969 DCHECK(!Runtime::Current()->UseJitCompilation());
3970 break;
3971 case HLoadClass::LoadKind::kDexCacheViaMethod:
3972 break;
3973 }
3974 return desired_class_load_kind;
3975}
3976
Alexandre Rames67555f72014-11-18 10:55:16 +00003977void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01003978 if (cls->NeedsAccessCheck()) {
3979 InvokeRuntimeCallingConvention calling_convention;
3980 CodeGenerator::CreateLoadClassLocationSummary(
3981 cls,
3982 LocationFrom(calling_convention.GetRegisterAt(0)),
3983 LocationFrom(vixl::x0),
3984 /* code_generator_supports_read_barrier */ true);
3985 return;
3986 }
3987
3988 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
3989 ? LocationSummary::kCallOnSlowPath
3990 : LocationSummary::kNoCall;
3991 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3992 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3993 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3994 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3995 locations->SetInAt(0, Location::RequiresRegister());
3996 }
3997 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00003998}
3999
4000void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01004001 if (cls->NeedsAccessCheck()) {
4002 codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex());
4003 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInitializeTypeAndVerifyAccess),
4004 cls,
4005 cls->GetDexPc(),
4006 nullptr);
Roland Levillain888d0672015-11-23 18:53:50 +00004007 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01004008 return;
4009 }
4010
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004011 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004012 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004013
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004014 bool generate_null_check = false;
4015 switch (cls->GetLoadKind()) {
4016 case HLoadClass::LoadKind::kReferrersClass: {
4017 DCHECK(!cls->CanCallRuntime());
4018 DCHECK(!cls->MustGenerateClinitCheck());
4019 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4020 Register current_method = InputRegisterAt(cls, 0);
4021 GenerateGcRootFieldLoad(
4022 cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
4023 break;
4024 }
4025 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4026 DCHECK(!kEmitCompilerReadBarrier);
4027 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4028 cls->GetTypeIndex()));
4029 break;
4030 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
4031 DCHECK(!kEmitCompilerReadBarrier);
4032 // Add ADRP with its PC-relative type patch.
4033 const DexFile& dex_file = cls->GetDexFile();
4034 uint32_t type_index = cls->GetTypeIndex();
4035 vixl::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
4036 {
4037 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4038 __ Bind(adrp_label);
4039 __ adrp(out.X(), /* offset placeholder */ 0);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004040 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004041 // Add ADD with its PC-relative type patch.
4042 vixl::Label* add_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
4043 {
4044 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4045 __ Bind(add_label);
4046 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00004047 }
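      // Once linked, this ADRP/ADD pair materializes the absolute address of
      // the type's GC root (sketch; the linker fills in both immediates):
      //   adrp xOut, <type root, page part>
      //   add  xOut, xOut, #<offset within the page>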
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004048 break;
4049 }
4050 case HLoadClass::LoadKind::kBootImageAddress: {
4051 DCHECK(!kEmitCompilerReadBarrier);
4052 DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
4053 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
4054 break;
4055 }
4056 case HLoadClass::LoadKind::kDexCacheAddress: {
4057 DCHECK_NE(cls->GetAddress(), 0u);
4058 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4059 // that gives a 16KiB range. To try and reduce the number of literals if we load
4060 // multiple types, simply split the dex cache address to a 16KiB aligned base
4061 // loaded from a literal and the remaining offset embedded in the load.
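      // Worked example with an assumed (purely illustrative) address: for
      // cls->GetAddress() == 0x71234568, offset_bits == 12 + 2 == 14, so
      // base_address == 0x71234000 (16KiB aligned) and offset == 0x568; the
      // 32-bit LDR then encodes the scaled immediate 0x568 / 4 == 0x15A.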
4062 static_assert(sizeof(GcRoot<mirror::Class>) == 4u, "Expected GC root to be 4 bytes.");
4063 DCHECK_ALIGNED(cls->GetAddress(), 4u);
4064 constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
4065 uint64_t base_address = cls->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
4066 uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits);
4067 __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
4068 // /* GcRoot<mirror::Class> */ out = *(base_address + offset)
4069 GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset);
4070 generate_null_check = !cls->IsInDexCache();
4071 break;
4072 }
4073 case HLoadClass::LoadKind::kDexCachePcRelative: {
4074 // Add ADRP with its PC-relative DexCache access patch.
4075 const DexFile& dex_file = cls->GetDexFile();
4076 uint32_t element_offset = cls->GetDexCacheElementOffset();
4077 vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
4078 {
4079 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4080 __ Bind(adrp_label);
4081 __ adrp(out.X(), /* offset placeholder */ 0);
4082 }
4083 // Add LDR with its PC-relative DexCache access patch.
4084 vixl::Label* ldr_label =
4085 codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
4086 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4087 GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
4088 generate_null_check = !cls->IsInDexCache();
4089 break;
4090 }
4091 case HLoadClass::LoadKind::kDexCacheViaMethod: {
4092 MemberOffset resolved_types_offset =
4093 ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
4094 // /* GcRoot<mirror::Class>[] */ out =
4095 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
4096 Register current_method = InputRegisterAt(cls, 0);
4097 __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
4098 // /* GcRoot<mirror::Class> */ out = out[type_index]
4099 GenerateGcRootFieldLoad(
4100 cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));
4101 generate_null_check = !cls->IsInDexCache();
4102 break;
4103 }
4104 }
4105
4106 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4107 DCHECK(cls->CanCallRuntime());
4108 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4109 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4110 codegen_->AddSlowPath(slow_path);
4111 if (generate_null_check) {
4112 __ Cbz(out, slow_path->GetEntryLabel());
4113 }
4114 if (cls->MustGenerateClinitCheck()) {
4115 GenerateClassInitializationCheck(slow_path, out);
4116 } else {
4117 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004118 }
4119 }
4120}
4121
David Brazdilcb1c0552015-08-04 16:22:25 +01004122static MemOperand GetExceptionTlsAddress() {
4123 return MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
4124}
4125
Alexandre Rames67555f72014-11-18 10:55:16 +00004126void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4127 LocationSummary* locations =
4128 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4129 locations->SetOut(Location::RequiresRegister());
4130}
4131
4132void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004133 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4134}
4135
4136void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4137 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4138}
4139
4140void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4141 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004142}
4143
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004144HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4145 HLoadString::LoadKind desired_string_load_kind) {
4146 if (kEmitCompilerReadBarrier) {
4147 switch (desired_string_load_kind) {
4148 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4149 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4150 case HLoadString::LoadKind::kBootImageAddress:
4151 // TODO: Implement for read barrier.
4152 return HLoadString::LoadKind::kDexCacheViaMethod;
4153 default:
4154 break;
4155 }
4156 }
4157 switch (desired_string_load_kind) {
4158 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4159 DCHECK(!GetCompilerOptions().GetCompilePic());
4160 break;
4161 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4162 DCHECK(GetCompilerOptions().GetCompilePic());
4163 break;
4164 case HLoadString::LoadKind::kBootImageAddress:
4165 break;
4166 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01004167 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004168 break;
4169 case HLoadString::LoadKind::kDexCachePcRelative:
Calin Juravleffc87072016-04-20 14:22:09 +01004170 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004171 break;
4172 case HLoadString::LoadKind::kDexCacheViaMethod:
4173 break;
4174 }
4175 return desired_string_load_kind;
4176}
4177
Alexandre Rames67555f72014-11-18 10:55:16 +00004178void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004179 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004180 ? LocationSummary::kCallOnSlowPath
4181 : LocationSummary::kNoCall;
4182 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004183 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
4184 locations->SetInAt(0, Location::RequiresRegister());
4185 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004186 locations->SetOut(Location::RequiresRegister());
4187}
4188
4189void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004190 Location out_loc = load->GetLocations()->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00004191 Register out = OutputRegister(load);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004192
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004193 switch (load->GetLoadKind()) {
4194 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4195 DCHECK(!kEmitCompilerReadBarrier);
4196 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4197 load->GetStringIndex()));
4198 return; // No dex cache slow path.
4199 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
4200 DCHECK(!kEmitCompilerReadBarrier);
4201 // Add ADRP with its PC-relative String patch.
4202 const DexFile& dex_file = load->GetDexFile();
4203 uint32_t string_index = load->GetStringIndex();
4204 vixl::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
4205 {
4206 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4207 __ Bind(adrp_label);
4208 __ adrp(out.X(), /* offset placeholder */ 0);
4209 }
4210 // Add ADD with its PC-relative String patch.
4211 vixl::Label* add_label =
4212 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
4213 {
4214 vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
4215 __ Bind(add_label);
4216 __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0));
4217 }
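      // As for HLoadClass above, the linked ADRP/ADD pair yields the absolute
      // address of the String's GC root.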
4218 return; // No dex cache slow path.
4219 }
4220 case HLoadString::LoadKind::kBootImageAddress: {
4221 DCHECK(!kEmitCompilerReadBarrier);
4222 DCHECK(load->GetAddress() != 0u && IsUint<32>(load->GetAddress()));
4223 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress()));
4224 return; // No dex cache slow path.
4225 }
4226 case HLoadString::LoadKind::kDexCacheAddress: {
4227 DCHECK_NE(load->GetAddress(), 0u);
4228 // LDR immediate has a 12-bit offset multiplied by the size and for 32-bit loads
4229 // that gives a 16KiB range. To try and reduce the number of literals if we load
4230 // multiple strings, simply split the dex cache address to a 16KiB aligned base
4231 // loaded from a literal and the remaining offset embedded in the load.
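      // Same split as for HLoadClass above: e.g. an assumed address 0x71234568
      // gives base_address 0x71234000 and an embedded offset of 0x568.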
      static_assert(sizeof(GcRoot<mirror::String>) == 4u, "Expected GC root to be 4 bytes.");
      DCHECK_ALIGNED(load->GetAddress(), 4u);
      constexpr size_t offset_bits = /* encoded bits */ 12 + /* scale */ 2;
      uint64_t base_address = load->GetAddress() & ~MaxInt<uint64_t>(offset_bits);
      uint32_t offset = load->GetAddress() & MaxInt<uint64_t>(offset_bits);
      __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address));
      // /* GcRoot<mirror::String> */ out = *(base_address + offset)
      GenerateGcRootFieldLoad(load, out_loc, out.X(), offset);
      break;
    }
    case HLoadString::LoadKind::kDexCachePcRelative: {
      // Add ADRP with its PC-relative DexCache access patch.
      const DexFile& dex_file = load->GetDexFile();
      uint32_t element_offset = load->GetDexCacheElementOffset();
      vixl::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
      {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(adrp_label);
        __ adrp(out.X(), /* offset placeholder */ 0);
      }
      // Add LDR with its PC-relative DexCache access patch.
      vixl::Label* ldr_label =
          codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
      // /* GcRoot<mirror::String> */ out = *(base_address + offset)  /* PC-relative */
      GenerateGcRootFieldLoad(load, out_loc, out.X(), /* offset placeholder */ 0, ldr_label);
      break;
    }
    case HLoadString::LoadKind::kDexCacheViaMethod: {
      Register current_method = InputRegisterAt(load, 0);
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(
          load, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value());
      // /* GcRoot<mirror::String>[] */ out = out->dex_cache_strings_
      __ Ldr(out.X(), HeapOperand(out, mirror::Class::DexCacheStringsOffset().Uint32Value()));
      // /* GcRoot<mirror::String> */ out = out[string_index]
      GenerateGcRootFieldLoad(
          load, out_loc, out.X(), CodeGenerator::GetCacheOffset(load->GetStringIndex()));
      break;
    }
    default:
      LOG(FATAL) << "Unexpected load kind: " << load->GetLoadKind();
      UNREACHABLE();
  }

  if (!load->IsInDexCache()) {
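    // The load above yields null when the string has not been resolved into
    // the dex cache yet; the slow path calls into the runtime to resolve it.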
    SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(out, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetExitLabel());
  }
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
                              ? QUICK_ENTRY_POINT(pLockObject)
                              : QUICK_ENTRY_POINT(pUnlockObject),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(2)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  InvokeRuntimeCallingConvention calling_convention;
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  __ Mov(type_index, instruction->GetTypeIndex());
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                          instruction,
                          instruction->GetDexPc(),
                          nullptr);
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(LocationFrom(kArtMethodRegister));
  } else {
    locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Location temp = instruction->GetLocations()->GetTemp(0);
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
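    // The pNewEmptyString entrypoint slot on the current thread (tr) holds an
    // ArtMethod*; load it, then load and call that method's quick compiled
    // code entry point.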
    __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
    __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
    __ Blr(lr);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(),
                            instruction,
                            instruction->GetDexPc(),
                            nullptr);
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::Operand(1));
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary::CallKind call_kind = instruction->CanThrowIntoCatchBlock()
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }

  BlockPoolsScope block_pools(GetVIXLAssembler());
  Location obj = instruction->GetLocations()->InAt(0);
  __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                           : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));

      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GenerateDivRemIntegral(rem);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
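      // These entry points are expected to behave like the C library's
      // fmodf/fmod (which they may simply wrap); the result carries the sign
      // of the dividend, as Java's % operator requires.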
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc(), nullptr);
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc(), nullptr);
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (result_type == Primitive::kPrimChar ||
               (input_type == Primitive::kPrimChar && input_size < result_size)) {
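      // char is Java's only unsigned integral type, so widen it with a
      // zero-extending bitfield extract (UBFX) rather than the sign-extending
      // SBFX used below.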
      __ Ubfx(output,
              output.IsX() ? source.X() : source.W(),
              0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
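    // FCVTZS rounds toward zero, saturates out-of-range values and converts
    // NaN to 0, which matches Java's float/double-to-int/long semantics, so
    // no extra fix-up code is needed.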
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Roughly set 16 as the maximum average number of instructions generated per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * vixl::kInstructionSize;
  // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph
  // to make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
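  // With vixl::kInstructionSize == 4 bytes, this works out to
  // 1 MB / (16 * 4 B) = 16384 HIR instructions as the cut-off for jump tables.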

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below should use at most one blocked register. Since there are two
    // blocked registers, we are free to block one.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, encoding the immediate value for Adr requires no
    // blocked registers, so we are free to use both VIXL blocked registers to reduce register
    // pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
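    // Each 32-bit table entry is assumed to hold a signed byte offset relative
    // to table_base, as implied by the sign-extending add below.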
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                                                     Location out,
                                                                     uint32_t offset,
                                                                     Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  if (kEmitCompilerReadBarrier) {
    Register temp_reg = RegisterFrom(maybe_temp, type);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                                                      Location out,
                                                                      Location obj,
                                                                      uint32_t offset,
                                                                      Location maybe_temp) {
  Primitive::Type type = Primitive::kPrimNot;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      temp_reg,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                            Location root,
                                                            vixl::Register obj,
                                                            uint32_t offset,
                                                            vixl::Label* fixup_label) {
  Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
  if (kEmitCompilerReadBarrier) {
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   if (Thread::Current()->GetIsGcMarking()) {
      //     root = ReadBarrier::Mark(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      if (fixup_label == nullptr) {
        __ Ldr(root_reg, MemOperand(obj, offset));
      } else {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ ldr(root_reg, MemOperand(obj, offset));
      }
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path used to mark the GC root `root`.
      SlowPathCodeARM64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, root);
      codegen_->AddSlowPath(slow_path);

      MacroAssembler* masm = GetVIXLAssembler();
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireW();
      // temp = Thread::Current()->GetIsGcMarking()
      __ Ldr(temp, MemOperand(tr, Thread::IsGcMarkingOffset<kArm64WordSize>().Int32Value()));
      __ Cbnz(temp, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
        __ Bind(fixup_label);
        __ add(root_reg.X(), obj.X(), offset);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      vixl::SingleEmissionCheckScope guard(GetVIXLAssembler());
      __ Bind(fixup_label);
      __ ldr(root_reg, MemOperand(obj, offset));
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t offset,
                                                               Register temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  size_t no_scale_factor = 0U;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Array cells are never volatile variables, therefore array loads
  // never use Load-Acquire instructions on ARM64.
  const bool use_load_acquire = false;

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   vixl::Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   size_t scale_factor,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Primitive::Type type = Primitive::kPrimNot;
  Register ref_reg = RegisterFrom(ref, type);
  DCHECK(obj.IsW());
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ Ldr(temp, HeapOperand(obj, monitor_offset));
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  // obj is unchanged by this operation, but its value now depends on temp.
  __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
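  // The addend is always zero: the 32-bit load of the lock word zero-extends
  // into the upper half of temp.X(), so shifting right by 32 yields 0. The
  // ADD therefore only establishes an address dependency on the lock word
  // load without altering obj.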

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index".
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0U);
      DCHECK_EQ(scale_factor, 0U);
      DCHECK(!needs_null_check);
      // /* HeapReference<Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject intrinsics cases.
      // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
      } else {
        Register temp2 = temps.AcquireW();
        __ Add(temp2, obj, offset);
        Load(type, ref_reg, HeapOperand(temp2, XRegisterFrom(index), LSL, scale_factor));
        temps.Release(temp2);
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      Load(type, ref_reg, field);
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);

  // Slow path used to mark the object `ref` when it is gray.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::gray_ptr_)
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
  static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
  __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
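  // TBNZ branches if the bit at kReadBarrierStateShift is set; per the
  // static_asserts above, that low rb_state bit is set only for gray objects,
  // so only they take the marking slow path.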
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex() % ImTable::kSize, kArm64PointerSize));
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art