/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

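// Maps an IfCondition to the equivalent ARM64 condition code.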
inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory operand used to save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating-point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

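// Slow path calling the runtime to throw an out-of-bounds exception for an array access or a
// String.charAt call.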
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

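// Slow path calling the kQuickThrowDivZero runtime entry point to throw on division by zero.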
class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

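// Slow path resolving a class and/or running its static initializer via a runtime call
// (kQuickInitializeType or kQuickInitializeStaticStorage). For HLoadClass with the kBssEntry
// load kind, it also stores the resolved Class to the .bss entry.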
class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
      DCHECK(out.IsValid());
      UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
      Register temp = temps.AcquireX();
      const DexFile& dex_file = cls_->GetDexFile();
      // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
      // kSaveEverything and use a temporary for the ADRP in the fast path, so that we
      // can avoid the ADRP here.
      vixl::aarch64::Label* adrp_label =
          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label, temp);
      vixl::aarch64::Label* strp_label =
          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
      {
        SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
        __ Bind(strp_label);
        __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
               MemOperand(temp, /* offset placeholder */ 0));
      }
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

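// Slow path resolving a String via the kQuickResolveString runtime entry point and storing the
// result to the .bss string entry.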
class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadStringSlowPathARM64(HLoadString* instruction, Register temp, vixl::aarch64::Label* adrp_label)
      : SlowPathCodeARM64(instruction),
        temp_(temp),
        adrp_label_(adrp_label) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // temp_ is a scratch register. Make sure it's not used for saving/restoring registers.
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    temps.Exclude(temp_);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile();
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // The string entry page address was preserved in temp_ thanks to kSaveEverything.
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry page.
      adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
    }
    vixl::aarch64::Label* strp_label =
        arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
    {
      SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
      __ Bind(strp_label);
      __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
             MemOperand(temp_, /* offset placeholder */ 0));
    }

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  const Register temp_;
  vixl::aarch64::Label* adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

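// Slow path calling the kQuickThrowNullPointer runtime entry point to throw a
// NullPointerException.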
class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

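// Slow path calling the kQuickTestSuspend runtime entry point for a thread suspension check.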
class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

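// Slow path for HInstanceOf and HCheckCast, calling the kQuickInstanceofNonTrivial or
// kQuickCheckInstanceOf runtime entry points.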
class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

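// Slow path for HDeoptimize, calling the kQuickDeoptimize runtime entry point.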
class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

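// Slow path for HArraySet, calling the kQuickAputObject runtime entry point to perform the store.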
class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

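// Emits the jump table for a PackedSwitch: one 32-bit offset from the table start to each
// successor block.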
void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathARM64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
// If entrypoint is a valid location it is assumed to already be holding the entrypoint. The case
// where the entrypoint is passed in is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction,
                               Location ref,
                               Location entrypoint = Location::NoLocation())
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathARM64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathARM64(HInstruction* instruction,
                                             Location ref,
                                             Register obj,
                                             Location field_offset,
                                             Register temp)
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    vixl::aarch64::Label done;
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);

    // Convenience aliases.
    Register base = obj_.W();
    Register offset = XRegisterFrom(field_offset_);
    Register expected = temp_.W();
    Register value = ref_reg;
    Register tmp_ptr = temps.AcquireX();    // Pointer to actual memory.
    Register tmp_value = temps.AcquireW();  // Value in memory.

    __ Add(tmp_ptr, base.X(), Operand(offset));

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not poison `value`, as it is the same register as
        // `expected`, which has just been poisoned.
      } else {
        arm64_codegen->GetAssembler()->PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp_value = [tmp_ptr] - expected;
    // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));

    vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&comparison_failed, ne);
    __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_value, &loop_head);
    __ B(&exit_loop);
    __ Bind(&comparison_failed);
    __ Clrex();
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not unpoison `value`, as it is the same register as
        // `expected`, which has just been unpoisoned.
      } else {
        arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0u);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};
1063
1064// Slow path generating a read barrier for a GC root.
1065class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
1066 public:
1067 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +00001068 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +00001069 DCHECK(kEmitCompilerReadBarrier);
1070 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001071
1072 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1073 LocationSummary* locations = instruction_->GetLocations();
1074 Primitive::Type type = Primitive::kPrimNot;
1075 DCHECK(locations->CanCall());
1076 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +00001077 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1078 << "Unexpected instruction in read barrier for GC root slow path: "
1079 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001080
1081 __ Bind(GetEntryLabel());
1082 SaveLiveRegisters(codegen, locations);
1083
1084 InvokeRuntimeCallingConvention calling_convention;
1085 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1086 // The argument of the ReadBarrierForRootSlow entry point is not a managed
1087 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
1088 // thus we need a 64-bit move here, and we cannot use
1089 //
1090 // arm64_codegen->MoveLocation(
1091 // LocationFrom(calling_convention.GetRegisterAt(0)),
1092 // root_,
1093 // type);
1094 //
1095 // which would emit a 32-bit move, as `type` is a (32-bit wide)
1096 // reference type (`Primitive::kPrimNot`).
1097 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001098 arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001099 instruction_,
1100 instruction_->GetDexPc(),
1101 this);
1102 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1103 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1104
1105 RestoreLiveRegisters(codegen, locations);
1106 __ B(GetExitLabel());
1107 }
1108
1109 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
1110
1111 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001112 const Location out_;
1113 const Location root_;
1114
1115 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
1116};
1117
Alexandre Rames5319def2014-10-23 10:03:10 +01001118#undef __
1119
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001120Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001121 Location next_location;
1122 if (type == Primitive::kPrimVoid) {
1123 LOG(FATAL) << "Unreachable type " << type;
1124 }
1125
Alexandre Rames542361f2015-01-29 16:57:31 +00001126 if (Primitive::IsFloatingPointType(type) &&
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001127 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
1128 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Alexandre Rames542361f2015-01-29 16:57:31 +00001129 } else if (!Primitive::IsFloatingPointType(type) &&
1130 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001131 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1132 } else {
1133 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +00001134 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1135 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001136 }
1137
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001138 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +00001139 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +01001140 return next_location;
1141}
1142
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001143Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001144 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001145}
1146
Serban Constantinescu579885a2015-02-22 20:51:33 +00001147CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
1148 const Arm64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001149 const CompilerOptions& compiler_options,
1150 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001151 : CodeGenerator(graph,
1152 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001153 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001154 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001155 callee_saved_core_registers.GetList(),
1156 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001157 compiler_options,
1158 stats),
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001159 block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Zheng Xu3927c8b2015-11-18 17:46:25 +08001160 jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001161 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001162 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +00001163 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001164 assembler_(graph->GetArena()),
Vladimir Marko58155012015-08-19 12:49:41 +00001165 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001166 uint32_literals_(std::less<uint32_t>(),
1167 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001168 uint64_literals_(std::less<uint64_t>(),
1169 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001170 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1171 boot_image_string_patches_(StringReferenceValueComparator(),
1172 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1173 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001174 boot_image_type_patches_(TypeReferenceValueComparator(),
1175 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1176 pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko1998cd02017-01-13 13:02:58 +00001177 type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001178 boot_image_address_patches_(std::less<uint32_t>(),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001179 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1180 jit_string_patches_(StringReferenceValueComparator(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001181 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1182 jit_class_patches_(TypeReferenceValueComparator(),
1183 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001184 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001185 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001186}
Alexandre Rames5319def2014-10-23 10:03:10 +01001187
Alexandre Rames67555f72014-11-18 10:55:16 +00001188#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001189
Zheng Xu3927c8b2015-11-18 17:46:25 +08001190void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001191 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001192 jump_table->EmitTable(this);
1193 }
1194}
1195
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001196void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001197 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001198 // Ensure we emit the literal pool.
1199 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001200
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001201 CodeGenerator::Finalize(allocator);
1202}
1203
Zheng Xuad4450e2015-04-17 18:48:56 +08001204void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1205 // Note: There are 6 kinds of moves:
1206 // 1. constant -> GPR/FPR (non-cycle)
1207 // 2. constant -> stack (non-cycle)
1208 // 3. GPR/FPR -> GPR/FPR
1209 // 4. GPR/FPR -> stack
1210 // 5. stack -> GPR/FPR
1211 // 6. stack -> stack (non-cycle)
1212 // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4, and 5
1213 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
1214 // cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to resolve the
1215 // dependency.
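  // For example, a GPR cycle such as {w0 -> w1, w1 -> w0} is (roughly) broken
  // by parking one of the values in the scratch X register acquired below, so
  // a single core temp suffices; FPR cycles are handled likewise with the D
  // scratch register.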
1216 vixl_temps_.Open(GetVIXLAssembler());
1217}
1218
1219void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1220 vixl_temps_.Close();
1221}
1222
1223Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
1224 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
1225 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
1226 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
1227 Location scratch = GetScratchLocation(kind);
1228 if (!scratch.Equals(Location::NoLocation())) {
1229 return scratch;
1230 }
1231 // Allocate from VIXL temp registers.
1232 if (kind == Location::kRegister) {
1233 scratch = LocationFrom(vixl_temps_.AcquireX());
1234 } else {
1235 DCHECK(kind == Location::kFpuRegister);
1236 scratch = LocationFrom(vixl_temps_.AcquireD());
1237 }
1238 AddScratchLocation(scratch);
1239 return scratch;
1240}
1241
1242void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1243 if (loc.IsRegister()) {
1244 vixl_temps_.Release(XRegisterFrom(loc));
1245 } else {
1246 DCHECK(loc.IsFpuRegister());
1247 vixl_temps_.Release(DRegisterFrom(loc));
1248 }
1249 RemoveScratchLocation(loc);
1250}
1251
Alexandre Rames3e69f162014-12-10 10:36:50 +00001252void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001253 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001254 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001255}
1256
Alexandre Rames5319def2014-10-23 10:03:10 +01001257void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001258 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001259 __ Bind(&frame_entry_label_);
1260
Serban Constantinescu02164b32014-11-13 14:05:07 +00001261 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1262 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001263 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001264 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001265 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001266 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001267 {
1268 // Ensure that between load and RecordPcInfo there are no pools emitted.
1269 ExactAssemblyScope eas(GetVIXLAssembler(),
1270 kInstructionSize,
1271 CodeBufferCheckScope::kExactSize);
1272 __ ldr(wzr, MemOperand(temp, 0));
1273 RecordPcInfo(nullptr, 0);
1274 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001275 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001276
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001277 if (!HasEmptyFrame()) {
1278 int frame_size = GetFrameSize();
1279 // Stack layout:
1280 // sp[frame_size - 8] : lr.
1281 // ... : other preserved core registers.
1282 // ... : other preserved fp registers.
1283 // ... : reserved frame space.
1284 // sp[0] : current method.
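    // For illustration (assuming frame_size == 64, core spills {x29, lr} and
    // FP spill {d8}): lr would sit at sp[56], x29 at sp[48], d8 at sp[40], the
    // ArtMethod* at sp[0], with reserved frame space in between.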
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001285
1286 // Save the current method if we need it. Note that we do not
1287 // do this in HCurrentMethod, as the instruction might have been removed
1288 // in the SSA graph.
1289 if (RequiresCurrentMethod()) {
1290 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001291 } else {
1292 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001293 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001294 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001295 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1296 frame_size - GetCoreSpillSize());
1297 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1298 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001299
1300 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1301 // Initialize should_deoptimize flag to 0.
1302 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1303 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1304 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001305 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001306}
1307
1308void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001309 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001310 if (!HasEmptyFrame()) {
1311 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001312 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1313 frame_size - FrameEntrySpillSize());
1314 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1315 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001316 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001317 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001318 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001319 __ Ret();
1320 GetAssembler()->cfi().RestoreState();
1321 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001322}
1323
Scott Wakeling97c72b72016-06-24 16:19:36 +01001324CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001325 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001326 return CPURegList(CPURegister::kRegister, kXRegSize,
1327 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001328}
1329
Scott Wakeling97c72b72016-06-24 16:19:36 +01001330CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001331 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1332 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001333 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1334 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001335}
1336
Alexandre Rames5319def2014-10-23 10:03:10 +01001337void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1338 __ Bind(GetLabelOf(block));
1339}
1340
Calin Juravle175dc732015-08-25 15:42:32 +01001341void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1342 DCHECK(location.IsRegister());
1343 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1344}
1345
Calin Juravlee460d1d2015-09-29 04:52:17 +01001346void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1347 if (location.IsRegister()) {
1348 locations->AddTemp(location);
1349 } else {
1350 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1351 }
1352}
1353
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001354void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001355 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001356 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001357 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001358 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001359 if (value_can_be_null) {
1360 __ Cbz(value, &done);
1361 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001362 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001363 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
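  // The byte stored below is simply the low byte of `card` (the biased card
  // table base); the runtime sets that base up so its low byte equals the
  // dirty-card value, which avoids materializing a separate constant here.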
Serban Constantinescu02164b32014-11-13 14:05:07 +00001364 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001365 if (value_can_be_null) {
1366 __ Bind(&done);
1367 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001368}
1369
David Brazdil58282f42016-01-14 12:45:10 +00001370void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001371 // Blocked core registers:
1372 // lr : Runtime reserved.
1373 // tr : Runtime reserved.
1374 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1375 // ip1 : VIXL core temp.
1376 // ip0 : VIXL core temp.
1377 //
1378 // Blocked fp registers:
1379 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001380 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1381 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001382 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001383 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001384 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001385
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001386 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001387 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001388 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001389 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001390
David Brazdil58282f42016-01-14 12:45:10 +00001391 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001392 // Stubs do not save callee-save floating point registers. If the graph
1393 // is debuggable, we need to deal with these registers differently. For
1394 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001395 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1396 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001397 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001398 }
1399 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001400}
1401
Alexandre Rames3e69f162014-12-10 10:36:50 +00001402size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1403 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1404 __ Str(reg, MemOperand(sp, stack_index));
1405 return kArm64WordSize;
1406}
1407
1408size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1409 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1410 __ Ldr(reg, MemOperand(sp, stack_index));
1411 return kArm64WordSize;
1412}
1413
1414size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1415 FPRegister reg = FPRegister(reg_id, kDRegSize);
1416 __ Str(reg, MemOperand(sp, stack_index));
1417 return kArm64WordSize;
1418}
1419
1420size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1421 FPRegister reg = FPRegister(reg_id, kDRegSize);
1422 __ Ldr(reg, MemOperand(sp, stack_index));
1423 return kArm64WordSize;
1424}
1425
Alexandre Rames5319def2014-10-23 10:03:10 +01001426void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001427 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001428}
1429
1430void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001431 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001432}
1433
Alexandre Rames67555f72014-11-18 10:55:16 +00001434void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001435 if (constant->IsIntConstant()) {
1436 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1437 } else if (constant->IsLongConstant()) {
1438 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1439 } else if (constant->IsNullConstant()) {
1440 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001441 } else if (constant->IsFloatConstant()) {
1442 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1443 } else {
1444 DCHECK(constant->IsDoubleConstant());
1445 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1446 }
1447}
1448
Alexandre Rames3e69f162014-12-10 10:36:50 +00001449
1450static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1451 DCHECK(constant.IsConstant());
1452 HConstant* cst = constant.GetConstant();
1453 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001454 // Null is mapped to a core W register, which we associate with kPrimInt.
1455 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001456 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1457 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1458 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1459}
1460
Roland Levillain558dea12017-01-27 19:40:44 +00001461// Allocate a scratch register from the VIXL pool, querying first into
1462// the floating-point register pool, and then the core register
1463// pool. This is essentially a reimplementation of
1464// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1465// using a different allocation strategy.
1466static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1467 vixl::aarch64::UseScratchRegisterScope* temps,
1468 int size_in_bits) {
1469 return masm->GetScratchFPRegisterList()->IsEmpty()
1470 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1471 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1472}
1473
Calin Juravlee460d1d2015-09-29 04:52:17 +01001474void CodeGeneratorARM64::MoveLocation(Location destination,
1475 Location source,
1476 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001477 if (source.Equals(destination)) {
1478 return;
1479 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001480
1481 // A valid move can always be inferred from the destination and source
1482 // locations. When moving from and to a register, the argument type can be
1483 // used to generate 32bit instead of 64bit moves. In debug mode we also
1484 // check the coherency of the locations and the type.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001485 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001486
1487 if (destination.IsRegister() || destination.IsFpuRegister()) {
1488 if (unspecified_type) {
1489 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1490 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001491 (src_cst != nullptr && (src_cst->IsIntConstant()
1492 || src_cst->IsFloatConstant()
1493 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001494 // For stack slots and 32bit constants, a 64bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001495 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001496 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001497 // If the source is a double stack slot or a 64bit constant, a 64bit
1498 // type is appropriate. Else the source is a register, and since the
1499 // type has not been specified, we choose a 64bit type to force a 64bit
1500 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001501 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001502 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001503 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001504 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1505 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1506 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001507 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1508 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1509 __ Ldr(dst, StackOperandFrom(source));
1510 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001511 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001512 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001513 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001514 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001515 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001516 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001517 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001518 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1519 ? Primitive::kPrimLong
1520 : Primitive::kPrimInt;
1521 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1522 }
1523 } else {
1524 DCHECK(source.IsFpuRegister());
1525 if (destination.IsRegister()) {
1526 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1527 ? Primitive::kPrimDouble
1528 : Primitive::kPrimFloat;
1529 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1530 } else {
1531 DCHECK(destination.IsFpuRegister());
1532 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001533 }
1534 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001535 } else { // The destination is not a register. It must be a stack slot.
1536 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1537 if (source.IsRegister() || source.IsFpuRegister()) {
1538 if (unspecified_type) {
1539 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001540 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001541 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001542 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001543 }
1544 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001545 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1546 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1547 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001548 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001549 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1550 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001551 UseScratchRegisterScope temps(GetVIXLAssembler());
1552 HConstant* src_cst = source.GetConstant();
1553 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001554 if (src_cst->IsZeroBitPattern()) {
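      // A zero bit pattern can be stored directly from wzr/xzr, so there is no
      // need to acquire a scratch register and materialize the constant.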
Scott Wakeling79db9972017-01-19 14:08:42 +00001555 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1556 ? Register(xzr)
1557 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001558 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001559 if (src_cst->IsIntConstant()) {
1560 temp = temps.AcquireW();
1561 } else if (src_cst->IsLongConstant()) {
1562 temp = temps.AcquireX();
1563 } else if (src_cst->IsFloatConstant()) {
1564 temp = temps.AcquireS();
1565 } else {
1566 DCHECK(src_cst->IsDoubleConstant());
1567 temp = temps.AcquireD();
1568 }
1569 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001570 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001571 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001572 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001573 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001574 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001575 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001576 // Use any scratch register (a core or a floating-point one)
1577 // from VIXL scratch register pools as a temporary.
1578 //
1579 // We used to only use the FP scratch register pool, but in some
1580 // rare cases the only register from this pool (D31) would
1581 // already be used (e.g. within a ParallelMove instruction, when
1582 // a move is blocked by another move requiring a scratch FP
1583 // register, which would reserve D31). To prevent this issue, we
1584 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001585 //
1586 // Also, we ask for an FP scratch register first, as the
1587 // demand for scratch core registers is higher.
1588 // use AcquireFPOrCoreCPURegisterOfSize instead of
1589 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1590 // allocates core scratch registers first.
1591 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1592 GetVIXLAssembler(),
1593 &temps,
1594 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001595 __ Ldr(temp, StackOperandFrom(source));
1596 __ Str(temp, StackOperandFrom(destination));
1597 }
1598 }
1599}
1600
1601void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001602 CPURegister dst,
1603 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001604 switch (type) {
1605 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001606 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001607 break;
1608 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001609 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001610 break;
1611 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001612 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001613 break;
1614 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001615 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001616 break;
1617 case Primitive::kPrimInt:
1618 case Primitive::kPrimNot:
1619 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001620 case Primitive::kPrimFloat:
1621 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001622 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001623 __ Ldr(dst, src);
1624 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001625 case Primitive::kPrimVoid:
1626 LOG(FATAL) << "Unreachable type " << type;
1627 }
1628}
1629
Calin Juravle77520bc2015-01-12 18:45:46 +00001630void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001631 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001632 const MemOperand& src,
1633 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001634 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001635 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001636 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001637 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001638
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001639 DCHECK(!src.IsPreIndex());
1640 DCHECK(!src.IsPostIndex());
1641
1642 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001643 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001644 {
1645 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1646 MemOperand base = MemOperand(temp_base);
1647 switch (type) {
1648 case Primitive::kPrimBoolean:
1649 {
1650 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1651 __ ldarb(Register(dst), base);
1652 if (needs_null_check) {
1653 MaybeRecordImplicitNullCheck(instruction);
1654 }
1655 }
1656 break;
1657 case Primitive::kPrimByte:
1658 {
1659 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1660 __ ldarb(Register(dst), base);
1661 if (needs_null_check) {
1662 MaybeRecordImplicitNullCheck(instruction);
1663 }
1664 }
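        // Ldarb zero-extends; sign-extend the loaded byte to yield the correct
        // signed value.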
1665 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1666 break;
1667 case Primitive::kPrimChar:
1668 {
1669 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1670 __ ldarh(Register(dst), base);
1671 if (needs_null_check) {
1672 MaybeRecordImplicitNullCheck(instruction);
1673 }
1674 }
1675 break;
1676 case Primitive::kPrimShort:
1677 {
1678 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1679 __ ldarh(Register(dst), base);
1680 if (needs_null_check) {
1681 MaybeRecordImplicitNullCheck(instruction);
1682 }
1683 }
1684 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1685 break;
1686 case Primitive::kPrimInt:
1687 case Primitive::kPrimNot:
1688 case Primitive::kPrimLong:
1689 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
1690 {
1691 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1692 __ ldar(Register(dst), base);
1693 if (needs_null_check) {
1694 MaybeRecordImplicitNullCheck(instruction);
1695 }
1696 }
1697 break;
1698 case Primitive::kPrimFloat:
1699 case Primitive::kPrimDouble: {
1700 DCHECK(dst.IsFPRegister());
1701 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001702
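        // Ldar only targets core registers, so acquire-load into a core temp
        // and then move the bits into the FP destination.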
Artem Serov914d7a82017-02-07 14:33:49 +00001703 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1704 {
1705 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1706 __ ldar(temp, base);
1707 if (needs_null_check) {
1708 MaybeRecordImplicitNullCheck(instruction);
1709 }
1710 }
1711 __ Fmov(FPRegister(dst), temp);
1712 break;
Roland Levillain44015862016-01-22 11:47:17 +00001713 }
Artem Serov914d7a82017-02-07 14:33:49 +00001714 case Primitive::kPrimVoid:
1715 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001716 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001717 }
1718}
1719
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001720void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001721 CPURegister src,
1722 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001723 switch (type) {
1724 case Primitive::kPrimBoolean:
1725 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001726 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001727 break;
1728 case Primitive::kPrimChar:
1729 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001730 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001731 break;
1732 case Primitive::kPrimInt:
1733 case Primitive::kPrimNot:
1734 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001735 case Primitive::kPrimFloat:
1736 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001737 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001738 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001739 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001740 case Primitive::kPrimVoid:
1741 LOG(FATAL) << "Unreachable type " << type;
1742 }
1743}
1744
Artem Serov914d7a82017-02-07 14:33:49 +00001745void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
1746 Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001747 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00001748 const MemOperand& dst,
1749 bool needs_null_check) {
1750 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001751 UseScratchRegisterScope temps(GetVIXLAssembler());
1752 Register temp_base = temps.AcquireX();
1753
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001754 DCHECK(!dst.IsPreIndex());
1755 DCHECK(!dst.IsPostIndex());
1756
1757 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001758 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001759 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001760 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00001761 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001762 switch (type) {
1763 case Primitive::kPrimBoolean:
1764 case Primitive::kPrimByte:
Artem Serov914d7a82017-02-07 14:33:49 +00001765 {
1766 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1767 __ stlrb(Register(src), base);
1768 if (needs_null_check) {
1769 MaybeRecordImplicitNullCheck(instruction);
1770 }
1771 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001772 break;
1773 case Primitive::kPrimChar:
1774 case Primitive::kPrimShort:
Artem Serov914d7a82017-02-07 14:33:49 +00001775 {
1776 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1777 __ stlrh(Register(src), base);
1778 if (needs_null_check) {
1779 MaybeRecordImplicitNullCheck(instruction);
1780 }
1781 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001782 break;
1783 case Primitive::kPrimInt:
1784 case Primitive::kPrimNot:
1785 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001786 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001787 {
1788 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1789 __ stlr(Register(src), base);
1790 if (needs_null_check) {
1791 MaybeRecordImplicitNullCheck(instruction);
1792 }
1793 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001794 break;
1795 case Primitive::kPrimFloat:
1796 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001797 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001798 Register temp_src;
1799 if (src.IsZero()) {
1800 // The zero register is used to avoid synthesizing zero constants.
1801 temp_src = Register(src);
1802 } else {
1803 DCHECK(src.IsFPRegister());
1804 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1805 __ Fmov(temp_src, FPRegister(src));
1806 }
Artem Serov914d7a82017-02-07 14:33:49 +00001807 {
1808 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1809 __ stlr(temp_src, base);
1810 if (needs_null_check) {
1811 MaybeRecordImplicitNullCheck(instruction);
1812 }
1813 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001814 break;
1815 }
1816 case Primitive::kPrimVoid:
1817 LOG(FATAL) << "Unreachable type " << type;
1818 }
1819}
1820
Calin Juravle175dc732015-08-25 15:42:32 +01001821void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1822 HInstruction* instruction,
1823 uint32_t dex_pc,
1824 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001825 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00001826
1827 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
1828 {
1829 // Ensure the pc position is recorded immediately after the `blr` instruction.
1830 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
1831 __ blr(lr);
1832 if (EntrypointRequiresStackMap(entrypoint)) {
1833 RecordPcInfo(instruction, dex_pc, slow_path);
1834 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001835 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001836}
1837
Roland Levillaindec8f632016-07-22 17:10:06 +01001838void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1839 HInstruction* instruction,
1840 SlowPathCode* slow_path) {
1841 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01001842 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1843 __ Blr(lr);
1844}
1845
Alexandre Rames67555f72014-11-18 10:55:16 +00001846void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001847 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001848 UseScratchRegisterScope temps(GetVIXLAssembler());
1849 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001850 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1851
Serban Constantinescu02164b32014-11-13 14:05:07 +00001852 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001853 // TODO(vixl): Let the MacroAssembler handle MemOperand.
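  // Ldar only accepts a base-register address, hence the explicit Add to form
  // the address of the status field before the acquire load.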
1854 __ Add(temp, class_reg, status_offset);
1855 __ Ldar(temp, HeapOperand(temp));
1856 __ Cmp(temp, mirror::Class::kStatusInitialized);
1857 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001858 __ Bind(slow_path->GetExitLabel());
1859}
Alexandre Rames5319def2014-10-23 10:03:10 +01001860
Roland Levillain44015862016-01-22 11:47:17 +00001861void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001862 BarrierType type = BarrierAll;
1863
1864 switch (kind) {
1865 case MemBarrierKind::kAnyAny:
1866 case MemBarrierKind::kAnyStore: {
1867 type = BarrierAll;
1868 break;
1869 }
1870 case MemBarrierKind::kLoadAny: {
1871 type = BarrierReads;
1872 break;
1873 }
1874 case MemBarrierKind::kStoreStore: {
1875 type = BarrierWrites;
1876 break;
1877 }
1878 default:
1879 LOG(FATAL) << "Unexpected memory barrier " << kind;
1880 }
1881 __ Dmb(InnerShareable, type);
1882}
1883
Serban Constantinescu02164b32014-11-13 14:05:07 +00001884void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1885 HBasicBlock* successor) {
1886 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001887 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1888 if (slow_path == nullptr) {
1889 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1890 instruction->SetSlowPath(slow_path);
1891 codegen_->AddSlowPath(slow_path);
1892 if (successor != nullptr) {
1893 DCHECK(successor->IsLoopHeader());
1894 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1895 }
1896 } else {
1897 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1898 }
1899
Serban Constantinescu02164b32014-11-13 14:05:07 +00001900 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1901 Register temp = temps.AcquireW();
1902
Andreas Gampe542451c2016-07-26 09:02:02 -07001903 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001904 if (successor == nullptr) {
1905 __ Cbnz(temp, slow_path->GetEntryLabel());
1906 __ Bind(slow_path->GetReturnLabel());
1907 } else {
1908 __ Cbz(temp, codegen_->GetLabelOf(successor));
1909 __ B(slow_path->GetEntryLabel());
1910 // slow_path will return to GetLabelOf(successor).
1911 }
1912}
1913
Alexandre Rames5319def2014-10-23 10:03:10 +01001914InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1915 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001916 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001917 assembler_(codegen->GetAssembler()),
1918 codegen_(codegen) {}
1919
1920#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001921 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001922
1923#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1924
1925enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001926 // Using a base helps identify when we hit such breakpoints.
1927 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001928#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1929 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1930#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1931};
1932
1933#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001934 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001935 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1936 } \
1937 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1938 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1939 locations->SetOut(Location::Any()); \
1940 }
1941 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1942#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1943
1944#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001945#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001946
Alexandre Rames67555f72014-11-18 10:55:16 +00001947void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001948 DCHECK_EQ(instr->InputCount(), 2U);
1949 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1950 Primitive::Type type = instr->GetResultType();
1951 switch (type) {
1952 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001953 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001954 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001955 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001956 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001957 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001958
1959 case Primitive::kPrimFloat:
1960 case Primitive::kPrimDouble:
1961 locations->SetInAt(0, Location::RequiresFpuRegister());
1962 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001963 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001964 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001965
Alexandre Rames5319def2014-10-23 10:03:10 +01001966 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001967 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001968 }
1969}
1970
Alexandre Rames09a99962015-04-15 11:47:56 +01001971void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001972 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1973
1974 bool object_field_get_with_read_barrier =
1975 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001976 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001977 new (GetGraph()->GetArena()) LocationSummary(instruction,
1978 object_field_get_with_read_barrier ?
1979 LocationSummary::kCallOnSlowPath :
1980 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01001981 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001982 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillaind0b51832017-01-26 19:04:23 +00001983 // We need a temporary register for the read barrier marking slow
1984 // path in CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
1985 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko70e97462016-08-09 11:04:26 +01001986 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001987 locations->SetInAt(0, Location::RequiresRegister());
1988 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1989 locations->SetOut(Location::RequiresFpuRegister());
1990 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001991 // The output overlaps for an object field get when read barriers
1992 // are enabled: we do not want the load to overwrite the object's
1993 // location, as we need it to emit the read barrier.
1994 locations->SetOut(
1995 Location::RequiresRegister(),
1996 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001997 }
1998}
1999
2000void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
2001 const FieldInfo& field_info) {
2002 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00002003 LocationSummary* locations = instruction->GetLocations();
2004 Location base_loc = locations->InAt(0);
2005 Location out = locations->Out();
2006 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01002007 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002008 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01002009
Roland Levillain44015862016-01-22 11:47:17 +00002010 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2011 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002012 // /* HeapReference<Object> */ out = *(base + offset)
2013 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
Roland Levillaind0b51832017-01-26 19:04:23 +00002014 Register temp = WRegisterFrom(locations->GetTemp(0));
Roland Levillain44015862016-01-22 11:47:17 +00002015 // Note that potential implicit null checks are handled in this
2016 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
2017 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2018 instruction,
2019 out,
2020 base,
2021 offset,
2022 temp,
2023 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002024 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00002025 } else {
2026 // General case.
2027 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002028 // Note that a potential implicit null check is handled in this
2029 // CodeGeneratorARM64::LoadAcquire call.
2030 // NB: LoadAcquire will record the pc info if needed.
2031 codegen_->LoadAcquire(
2032 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01002033 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002034 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2035 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002036 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01002037 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01002038 }
Roland Levillain44015862016-01-22 11:47:17 +00002039 if (field_type == Primitive::kPrimNot) {
2040 // If read barriers are enabled, emit read barriers other than
2041 // Baker's using a slow path (and also unpoison the loaded
2042 // reference, if heap poisoning is enabled).
2043 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
2044 }
Roland Levillain4d027112015-07-01 15:41:14 +01002045 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002046}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
  } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  Primitive::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(
          instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
    } else {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}
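
// Illustrative sketch only (registers, offsets and annotations below are
// hypothetical, not taken from this file): a non-volatile reference field store
// produced by the code above typically boils down to
//   str w1, [x0, #12]   // value into obj.field
// with MaybeRecordImplicitNullCheck recording the str's pc for the fault
// handler, and MarkGCCard then dirtying the card covering `obj` unless the
// value is known null. The volatile path instead goes through
// CodeGeneratorARM64::StoreRelease, which materializes the field address in a
// temporary first because a store-release (stlr) only takes a plain base
// register address.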

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure the shift distance is in the same size register as the result. If
          // we are rotating a long and the shift comes in a w register originally,
          // we don't need to sxtw for use as an x since the shift distances are
          // all masked with reg_bits - 1.
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  Primitive::Type type = instr->GetType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.GetImmediate() &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
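
// The masking above mirrors the Java shift rules: only the low 5 bits of the
// distance are used for int shifts and the low 6 bits for long shifts, so for
// example `x << 33` on an int shifts by 1. The register case can pass the
// distance through unmasked because the AArch64 variable-shift instructions
// (LSLV/ASRV/LSRV) apply the same modulo-register-size semantics in hardware.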

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}
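
// For illustration (arbitrary registers), HBitwiseNegatedRight lets a negated
// operand fold into a single instruction instead of an MVN plus the operation:
//   a & ~b  =>  bic w0, w1, w2
//   a | ~b  =>  orn w0, w1, w2
//   a ^ ~b  =>  eon w0, w1, w2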

void LocationsBuilderARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == Primitive::kPrimInt ||
         instruction->GetType() == Primitive::kPrimLong);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArm64DataProcWithShifterOp(
    HArm64DataProcWithShifterOp* instruction) {
  Primitive::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HArm64DataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  int64_t shift_amount = instruction->GetShiftAmount() &
      (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

  Operand right_operand(0);

  HArm64DataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg, helpers::ShiftFromOpKind(op_kind), shift_amount);
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it was passed by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}
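
// For illustration (arbitrary registers), merging the shift or extension into
// the operand lets `a + (b << 5)` become a single
//   add w0, w1, w2, lsl #5
// and an addition of a sign-extended int to a long become
//   add x0, x1, w2, sxtw
// rather than a separate shift/extend followed by the arithmetic instruction.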

void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate a register for the Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == Primitive::kPrimLong &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::aarch64::Instruction* prev =
        masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}
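
// Erratum 835769 affects some Cortex-A53 revisions: a 64-bit multiply-accumulate
// issued directly after a load or store can produce a wrong result. The fixup
// above breaks exactly that sequence, e.g. (illustrative only):
//   ldr  x5, [x10]
//   nop                      // inserted by the check above
//   madd x3, x1, x2, x3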

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier ?
                                                       LocationSummary::kCallOnSlowPath :
                                                       LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // The read barrier instrumentation of object ArrayGet instructions
  // does not support the HIntermediateAddress instruction.
  DCHECK(!((type == Primitive::kPrimNot) &&
           instruction->GetArray()->IsIntermediateAddress() &&
           kEmitCompilerReadBarrier));

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    Register temp = temps.AcquireW();
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    codegen_->GenerateArrayLoadWithBakerReadBarrier(
        instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    Register length;
    if (maybe_compressed_char_at) {
      uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
      length = temps.AcquireW();
      {
        // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);

        if (instruction->GetArray()->IsIntermediateAddress()) {
          DCHECK_LT(count_offset, offset);
          int64_t adjusted_offset =
              static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
          // Note that `adjusted_offset` is negative, so this will be a LDUR.
          __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
        } else {
          __ Ldr(length, HeapOperand(obj, count_offset));
        }
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }
    if (index.IsConstant()) {
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + Int64ConstantFrom(index)));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + (Int64ConstantFrom(index) << 1)));
        __ Bind(&done);
      } else {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
        source = HeapOperand(obj, offset);
      }
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 0));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 1));
        __ Bind(&done);
      } else {
        source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
      }
    }
    if (!maybe_compressed_char_at) {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Load(type, OutputCPURegister(instruction), source);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }

    if (type == Primitive::kPrimNot) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  vixl::aarch64::Register out = OutputRegister(instruction);
  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
  // Mask out the compression flag from the String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Lsr(out.W(), out.W(), 1u);
  }
}
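
// With string compression enabled, the String `count` field packs the flag in
// bit 0: a three-character ASCII string stores count == (3 << 1) | 0
// (compressed, 8-bit chars), while the same length with non-ASCII chars stores
// count == (3 << 1) | 1 (uncompressed, 16-bit chars). That is why the length is
// the field shifted right by one here, and why the String.charAt() path in
// VisitArrayGet above tests bit 0 to choose between an Ldrb and an Ldrh.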

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
  } else if (Primitive::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                Primitive::ComponentSizeShift(value_type));
    }
    {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Store(value_type, value, destination);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  } else {
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    vixl::aarch64::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  Primitive::ComponentSizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::aarch64::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          {
            // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
            // emitted.
            EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
            __ Str(wzr, destination);
            codegen_->MaybeRecordImplicitNullCheck(instruction);
          }
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        Register temp2 = temps.AcquireSameSizeAs(array);
        // /* HeapReference<Class> */ temp = array->klass_
        {
          // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
          EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
        GetAssembler()->MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ Ldr(temp, HeapOperand(temp, component_offset));
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ Ldr(temp2, HeapOperand(Register(value), class_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor `temp2`, as we are comparing two poisoned references.
        __ Cmp(temp, temp2);
        temps.Release(temp2);

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          vixl::aarch64::Label do_put;
          __ B(eq, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->super_class_
          __ Ldr(temp, HeapOperand(temp, super_offset));
          // If heap poisoning is enabled, no need to unpoison
          // `temp`, as we are comparing against null below.
          __ Cbnz(temp, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ B(ne, slow_path->GetEntryLabel());
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      }
      {
        // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        __ Str(source, destination);

        if (!may_need_runtime_call_for_type_check) {
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}
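
// A single unsigned comparison covers both failure modes: the branch above is
// taken when index >= length (hs is unsigned greater-or-equal), and a negative
// index such as -1 reinterprets as 0xFFFFFFFF, which is also >= any valid
// length, so no separate "index < 0" test is needed.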

void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equal, Float.compare,
    // Float.compareTo, Double.equal, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  Primitive::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  //  0 if: left == right
  //  1 if: left  > right
  // -1 if: left  < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
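
// Worked example for the integer path (arbitrary registers): with left == 5 and
// right == 9, the Cmp sets the flags, then
//   cset w0, ne      // w0 = 1 because 5 != 9
//   cneg w0, w0, lt  // 5 < 9, so w0 becomes -1
// Equal inputs leave w0 == 0, and left > right keeps the +1 from the Cset.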

void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}
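
// This path only runs when the condition has to be materialized into a register
// (e.g. its result is stored or has multiple uses); an `x < y` comparison then
// becomes, roughly,
//   cmp  w1, w2
//   cset w0, lt
// Conditions emitted at their use site are folded into the consuming branch or
// select instead and never reach this code.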

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                    \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    __ Mov(out, 0);
  } else {
    if (imm == 1) {
      __ Mov(out, dividend);
    } else {
      __ Neg(out, dividend);
    }
  }
}

void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  if (instruction->IsDiv()) {
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
    if (imm > 0) {
      __ Asr(out, out, ctz_imm);
    } else {
      __ Neg(out, Operand(out, ASR, ctz_imm));
    }
  } else {
    int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
    __ Asr(temp, dividend, bits - 1);
    __ Lsr(temp, temp, bits - ctz_imm);
    __ Add(out, dividend, temp);
    __ And(out, out, abs_imm - 1);
    __ Sub(out, out, temp);
  }
}
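
// Worked example for the Div path with imm == 4 (abs_imm == 4, ctz_imm == 2)
// and dividend == -7: temp = -7 + 3 = -4; the Csel keeps -4 because the
// dividend is negative; asr #2 then yields -1, matching Java's round-toward-zero
// division (-7 / 4 == -1). A non-negative dividend is left unchanged before the
// shift, so 7 / 4 == 1 as expected.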

void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == Primitive::kPrimLong) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}
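
// This is the classic "magic number" division of Hacker's Delight: the quotient
// comes from the high half of dividend * magic, optionally corrected by adding
// or subtracting the dividend when the signs of magic and the divisor differ,
// an arithmetic shift, and a final +1 for negative intermediates (the Sub with
// `ASR, 31/63` above). As a rough illustration (constants quoted from the
// standard tables, not taken from this file): dividing by 7 in 32 bits uses
// magic == 0x92492493 and shift == 2, so a dividend of 21 gives
// high(21 * magic) == -9, then -9 + 21 == 12 and 12 >> 2 == 3.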

void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  LocationSummary* locations = instruction->GetLocations();
  Register out = OutputRegister(instruction);
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());

    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    if (instruction->IsDiv()) {
      __ Sdiv(out, dividend, divisor);
    } else {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temps.AcquireSameSizeAs(out);
      __ Sdiv(temp, dividend, divisor);
      __ Msub(out, temp, divisor, dividend);
    }
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  Primitive::Type type = div->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(div);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64ConstantFrom(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}
3152
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003153void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3154 LocationSummary* locations =
3155 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3156 locations->SetOut(Location::ConstantLocation(constant));
3157}
3158
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003159void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3160 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003161 // Will be generated at use site.
3162}
3163
Alexandre Rames5319def2014-10-23 10:03:10 +01003164void LocationsBuilderARM64::VisitExit(HExit* exit) {
3165 exit->SetLocations(nullptr);
3166}
3167
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003168void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003169}
3170
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003171void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3172 LocationSummary* locations =
3173 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3174 locations->SetOut(Location::ConstantLocation(constant));
3175}
3176
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003177void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003178 // Will be generated at use site.
3179}
3180
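// Shared by HGoto and HTryBoundary: emit the loop suspend check when the block is a back edge
// that carries one, emit the entry-block suspend check when it immediately precedes the goto,
// and emit the branch itself only if the successor is not the next block in program order.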
David Brazdilfc6a86a2015-06-26 10:33:45 +00003181void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003182 DCHECK(!successor->IsExitBlock());
3183 HBasicBlock* block = got->GetBlock();
3184 HInstruction* previous = got->GetPrevious();
3185 HLoopInformation* info = block->GetLoopInformation();
3186
David Brazdil46e2a392015-03-16 17:31:52 +00003187 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003188 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3189 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3190 return;
3191 }
3192 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3193 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3194 }
3195 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003196 __ B(codegen_->GetLabelOf(successor));
3197 }
3198}
3199
David Brazdilfc6a86a2015-06-26 10:33:45 +00003200void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3201 got->SetLocations(nullptr);
3202}
3203
3204void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3205 HandleGoto(got, got->GetSuccessor());
3206}
3207
3208void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3209 try_boundary->SetLocations(nullptr);
3210}
3211
3212void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3213 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3214 if (!successor->IsExitBlock()) {
3215 HandleGoto(try_boundary, successor);
3216 }
3217}
3218
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003219void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003220 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003221 vixl::aarch64::Label* true_target,
3222 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003223 // FP branching requires both targets to be explicit. If either of the targets
3224 // is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003225 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003226 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003227
David Brazdil0debae72015-11-12 18:37:00 +00003228 if (true_target == nullptr && false_target == nullptr) {
3229 // Nothing to do. The code always falls through.
3230 return;
3231 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003232 // Constant condition, statically compared against "true" (integer value 1).
3233 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003234 if (true_target != nullptr) {
3235 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003236 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003237 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003238 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003239 if (false_target != nullptr) {
3240 __ B(false_target);
3241 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003242 }
David Brazdil0debae72015-11-12 18:37:00 +00003243 return;
3244 }
3245
3246 // The following code generates these patterns:
3247 // (1) true_target == nullptr && false_target != nullptr
3248 // - opposite condition true => branch to false_target
3249 // (2) true_target != nullptr && false_target == nullptr
3250 // - condition true => branch to true_target
3251 // (3) true_target != nullptr && false_target != nullptr
3252 // - condition true => branch to true_target
3253 // - branch to false_target
3254 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003255 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003256 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003257 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003258 if (true_target == nullptr) {
3259 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3260 } else {
3261 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3262 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003263 } else {
3264 // The condition instruction has not been materialized, use its inputs as
3265 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003266 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003267
David Brazdil0debae72015-11-12 18:37:00 +00003268 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00003269 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003270 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003271 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003272 IfCondition opposite_condition = condition->GetOppositeCondition();
3273 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003274 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003275 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003276 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003277 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003278 // Integer cases.
3279 Register lhs = InputRegisterAt(condition, 0);
3280 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003281
3282 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003283 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003284 if (true_target == nullptr) {
3285 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3286 non_fallthrough_target = false_target;
3287 } else {
3288 arm64_cond = ARM64Condition(condition->GetCondition());
3289 non_fallthrough_target = true_target;
3290 }
3291
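      // Comparisons of eq/ne/lt/ge against an immediate zero can use the compare-and-branch
      // (cbz/cbnz) or test-bit-and-branch (tbz/tbnz on the sign bit) forms below, avoiding a
      // separate cmp and leaving the condition flags untouched.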
Aart Bik086d27e2016-01-20 17:02:00 -08003292 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003293 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003294 switch (arm64_cond) {
3295 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003296 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003297 break;
3298 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003299 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003300 break;
3301 case lt:
3302 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003303 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003304 break;
3305 case ge:
3306 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003307 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003308 break;
3309 default:
3310 // Without the `static_cast` the compiler throws an error for
3311 // `-Werror=sign-promo`.
3312 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3313 }
3314 } else {
3315 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003316 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003317 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003318 }
3319 }
David Brazdil0debae72015-11-12 18:37:00 +00003320
3321 // If neither branch falls through (case 3), the conditional branch to `true_target`
3322 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3323 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003324 __ B(false_target);
3325 }
David Brazdil0debae72015-11-12 18:37:00 +00003326
3327 if (fallthrough_target.IsLinked()) {
3328 __ Bind(&fallthrough_target);
3329 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003330}
3331
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003332void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
3333 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003334 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003335 locations->SetInAt(0, Location::RequiresRegister());
3336 }
3337}
3338
3339void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003340 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3341 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003342 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3343 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3344 true_target = nullptr;
3345 }
3346 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3347 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3348 false_target = nullptr;
3349 }
David Brazdil0debae72015-11-12 18:37:00 +00003350 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003351}
3352
3353void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
3354 LocationSummary* locations = new (GetGraph()->GetArena())
3355 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01003356 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00003357 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003358 locations->SetInAt(0, Location::RequiresRegister());
3359 }
3360}
3361
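// A deoptimization guard is a conditional branch to DeoptimizationSlowPathARM64: the slow path
// entry is the true target and there is no false target, so execution simply falls through when
// the condition does not hold.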
3362void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003363 SlowPathCodeARM64* slow_path =
3364 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003365 GenerateTestAndBranch(deoptimize,
3366 /* condition_input_index */ 0,
3367 slow_path->GetEntryLabel(),
3368 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003369}
3370
Mingyao Yang063fc772016-08-02 11:02:54 -07003371void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3372 LocationSummary* locations = new (GetGraph()->GetArena())
3373 LocationSummary(flag, LocationSummary::kNoCall);
3374 locations->SetOut(Location::RequiresRegister());
3375}
3376
3377void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3378 __ Ldr(OutputRegister(flag),
3379 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3380}
3381
David Brazdilc0b601b2016-02-08 14:20:45 +00003382static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3383 return condition->IsCondition() &&
3384 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
3385}
3386
Alexandre Rames880f1192016-06-13 16:04:50 +01003387static inline Condition GetConditionForSelect(HCondition* condition) {
3388 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003389 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3390 : ARM64Condition(cond);
3391}
3392
David Brazdil74eb1b22015-12-14 11:44:01 +00003393void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3394 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01003395 if (Primitive::IsFloatingPointType(select->GetType())) {
3396 locations->SetInAt(0, Location::RequiresFpuRegister());
3397 locations->SetInAt(1, Location::RequiresFpuRegister());
3398 locations->SetOut(Location::RequiresFpuRegister());
3399 } else {
3400 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3401 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3402 bool is_true_value_constant = cst_true_value != nullptr;
3403 bool is_false_value_constant = cst_false_value != nullptr;
3404 // Ask VIXL whether we should synthesize constants in registers.
3405 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3406 Operand true_op = is_true_value_constant ?
3407 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3408 Operand false_op = is_false_value_constant ?
3409 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3410 bool true_value_in_register = false;
3411 bool false_value_in_register = false;
3412 MacroAssembler::GetCselSynthesisInformation(
3413 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3414 true_value_in_register |= !is_true_value_constant;
3415 false_value_in_register |= !is_false_value_constant;
3416
3417 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3418 : Location::ConstantLocation(cst_true_value));
3419 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3420 : Location::ConstantLocation(cst_false_value));
3421 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003422 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003423
David Brazdil74eb1b22015-12-14 11:44:01 +00003424 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3425 locations->SetInAt(2, Location::RequiresRegister());
3426 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003427}
3428
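// The select is lowered to a single conditional select: reuse the flags if the condition is the
// HCondition immediately preceding this HSelect, otherwise compare the materialized boolean (or
// the condition's operands) first, then pick between the two inputs with csel/fcsel.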
3429void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003430 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003431 Condition csel_cond;
3432
3433 if (IsBooleanValueOrMaterializedCondition(cond)) {
3434 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003435 // Use the condition flags set by the previous instruction.
3436 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003437 } else {
3438 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003439 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003440 }
3441 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003442 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003443 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003444 } else {
3445 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003446 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003447 }
3448
Alexandre Rames880f1192016-06-13 16:04:50 +01003449 if (Primitive::IsFloatingPointType(select->GetType())) {
3450 __ Fcsel(OutputFPRegister(select),
3451 InputFPRegisterAt(select, 1),
3452 InputFPRegisterAt(select, 0),
3453 csel_cond);
3454 } else {
3455 __ Csel(OutputRegister(select),
3456 InputOperandAt(select, 1),
3457 InputOperandAt(select, 0),
3458 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003459 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003460}
3461
David Srbecky0cf44932015-12-09 14:09:59 +00003462void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3463 new (GetGraph()->GetArena()) LocationSummary(info);
3464}
3465
David Srbeckyd28f4a02016-03-14 17:14:24 +00003466void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3467 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003468}
3469
3470void CodeGeneratorARM64::GenerateNop() {
3471 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003472}
3473
Alexandre Rames5319def2014-10-23 10:03:10 +01003474void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003475 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003476}
3477
3478void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003479 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003480}
3481
3482void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003483 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003484}
3485
3486void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003487 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003488}
3489
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003490// Temp is used for read barrier.
3491static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3492 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003493 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003494 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3495 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3496 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3497 return 1;
3498 }
3499 return 0;
3500}
3501
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003502// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003503// interface pointer, one for loading the current interface.
3504// The other checks have one temp for loading the object's class.
3505static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3506 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3507 return 3;
3508 }
3509 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003510}
3511
Alexandre Rames67555f72014-11-18 10:55:16 +00003512void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003513 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003514 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003515 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003516 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003517 case TypeCheckKind::kExactCheck:
3518 case TypeCheckKind::kAbstractClassCheck:
3519 case TypeCheckKind::kClassHierarchyCheck:
3520 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003521 call_kind =
3522 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01003523 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003524 break;
3525 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003526 case TypeCheckKind::kUnresolvedCheck:
3527 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003528 call_kind = LocationSummary::kCallOnSlowPath;
3529 break;
3530 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003531
Alexandre Rames67555f72014-11-18 10:55:16 +00003532 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003533 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003534 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003535 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003536 locations->SetInAt(0, Location::RequiresRegister());
3537 locations->SetInAt(1, Location::RequiresRegister());
3538 // The "out" register is used as a temporary, so it overlaps with the inputs.
3539 // Note that TypeCheckSlowPathARM64 uses this register too.
3540 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003541 // Add temps if necessary for read barriers.
3542 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003543}
3544
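// The result register `out` doubles as the cursor for the inline class walks below: the exact
// check sets it with `cset eq`, the abstract/hierarchy/array-object checks loop over super
// classes or inspect the component type, and the remaining kinds defer to TypeCheckSlowPathARM64.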
3545void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003546 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003547 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003548 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003549 Register obj = InputRegisterAt(instruction, 0);
3550 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003551 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003552 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003553 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3554 DCHECK_LE(num_temps, 1u);
3555 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003556 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3557 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3558 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3559 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003560
Scott Wakeling97c72b72016-06-24 16:19:36 +01003561 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003562 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003563
3564 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003565 // Avoid null check if we know `obj` is not null.
3566 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003567 __ Cbz(obj, &zero);
3568 }
3569
Roland Levillain44015862016-01-22 11:47:17 +00003570 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003571 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003572 // /* HeapReference<Class> */ out = obj->klass_
3573 GenerateReferenceLoadTwoRegisters(instruction,
3574 out_loc,
3575 obj_loc,
3576 class_offset,
3577 maybe_temp_loc,
3578 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003579 __ Cmp(out, cls);
3580 __ Cset(out, eq);
3581 if (zero.IsLinked()) {
3582 __ B(&done);
3583 }
3584 break;
3585 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003586
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003587 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003588 // /* HeapReference<Class> */ out = obj->klass_
3589 GenerateReferenceLoadTwoRegisters(instruction,
3590 out_loc,
3591 obj_loc,
3592 class_offset,
3593 maybe_temp_loc,
3594 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003595 // If the class is abstract, we eagerly fetch the super class of the
3596 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003597      vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003598 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003599 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003600 GenerateReferenceLoadOneRegister(instruction,
3601 out_loc,
3602 super_offset,
3603 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003604 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003605 // If `out` is null, we use it for the result, and jump to `done`.
3606 __ Cbz(out, &done);
3607 __ Cmp(out, cls);
3608 __ B(ne, &loop);
3609 __ Mov(out, 1);
3610 if (zero.IsLinked()) {
3611 __ B(&done);
3612 }
3613 break;
3614 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003615
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003616 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003617 // /* HeapReference<Class> */ out = obj->klass_
3618 GenerateReferenceLoadTwoRegisters(instruction,
3619 out_loc,
3620 obj_loc,
3621 class_offset,
3622 maybe_temp_loc,
3623 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003624 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003625 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003626 __ Bind(&loop);
3627 __ Cmp(out, cls);
3628 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003629 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003630 GenerateReferenceLoadOneRegister(instruction,
3631 out_loc,
3632 super_offset,
3633 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003634 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003635 __ Cbnz(out, &loop);
3636 // If `out` is null, we use it for the result, and jump to `done`.
3637 __ B(&done);
3638 __ Bind(&success);
3639 __ Mov(out, 1);
3640 if (zero.IsLinked()) {
3641 __ B(&done);
3642 }
3643 break;
3644 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003645
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003646 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003647 // /* HeapReference<Class> */ out = obj->klass_
3648 GenerateReferenceLoadTwoRegisters(instruction,
3649 out_loc,
3650 obj_loc,
3651 class_offset,
3652 maybe_temp_loc,
3653 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003654 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003655 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003656 __ Cmp(out, cls);
3657 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003658 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003659 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003660 GenerateReferenceLoadOneRegister(instruction,
3661 out_loc,
3662 component_offset,
3663 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003664 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003665 // If `out` is null, we use it for the result, and jump to `done`.
3666 __ Cbz(out, &done);
3667 __ Ldrh(out, HeapOperand(out, primitive_offset));
3668 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3669 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003670 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003671 __ Mov(out, 1);
3672 __ B(&done);
3673 break;
3674 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003675
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003676 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003677 // No read barrier since the slow path will retry upon failure.
3678 // /* HeapReference<Class> */ out = obj->klass_
3679 GenerateReferenceLoadTwoRegisters(instruction,
3680 out_loc,
3681 obj_loc,
3682 class_offset,
3683 maybe_temp_loc,
3684 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003685 __ Cmp(out, cls);
3686 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003687 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3688 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003689 codegen_->AddSlowPath(slow_path);
3690 __ B(ne, slow_path->GetEntryLabel());
3691 __ Mov(out, 1);
3692 if (zero.IsLinked()) {
3693 __ B(&done);
3694 }
3695 break;
3696 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003697
Calin Juravle98893e12015-10-02 21:05:03 +01003698 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003699 case TypeCheckKind::kInterfaceCheck: {
3700 // Note that we indeed only call on slow path, but we always go
3701 // into the slow path for the unresolved and interface check
3702 // cases.
3703 //
3704 // We cannot directly call the InstanceofNonTrivial runtime
3705 // entry point without resorting to a type checking slow path
3706 // here (i.e. by calling InvokeRuntime directly), as it would
3707      // require assigning fixed registers for the inputs of this
3708 // HInstanceOf instruction (following the runtime calling
3709 // convention), which might be cluttered by the potential first
3710 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003711 //
3712 // TODO: Introduce a new runtime entry point taking the object
3713 // to test (instead of its class) as argument, and let it deal
3714 // with the read barrier issues. This will let us refactor this
3715 // case of the `switch` code as it was previously (with a direct
3716 // call to the runtime not using a type checking slow path).
3717 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003718 DCHECK(locations->OnlyCallsOnSlowPath());
3719 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3720 /* is_fatal */ false);
3721 codegen_->AddSlowPath(slow_path);
3722 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003723 if (zero.IsLinked()) {
3724 __ B(&done);
3725 }
3726 break;
3727 }
3728 }
3729
3730 if (zero.IsLinked()) {
3731 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003732 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003733 }
3734
3735 if (done.IsLinked()) {
3736 __ Bind(&done);
3737 }
3738
3739 if (slow_path != nullptr) {
3740 __ Bind(slow_path->GetExitLabel());
3741 }
3742}
3743
3744void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3745 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3746 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3747
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003748 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3749 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003750 case TypeCheckKind::kExactCheck:
3751 case TypeCheckKind::kAbstractClassCheck:
3752 case TypeCheckKind::kClassHierarchyCheck:
3753 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003754 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3755 LocationSummary::kCallOnSlowPath :
3756 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003757 break;
3758 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003759 case TypeCheckKind::kUnresolvedCheck:
3760 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003761 call_kind = LocationSummary::kCallOnSlowPath;
3762 break;
3763 }
3764
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003765 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3766 locations->SetInAt(0, Location::RequiresRegister());
3767 locations->SetInAt(1, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003768 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
3769 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003770}
3771
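// Unlike HInstanceOf, a failing HCheckCast must throw, so every mismatch below branches to
// TypeCheckSlowPathARM64. The inline class loads deliberately skip read barriers; when read
// barriers are enabled the slow path is non-fatal and re-checks the type, filtering out the
// false negatives this may produce.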
3772void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003773 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003774 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003775 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003776 Register obj = InputRegisterAt(instruction, 0);
3777 Register cls = InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003778 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3779 DCHECK_GE(num_temps, 1u);
3780 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003781 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003782 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
3783 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003784 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003785 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3786 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3787 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3788 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3789 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3790 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3791 const uint32_t object_array_data_offset =
3792 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003793
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003794 bool is_type_check_slow_path_fatal = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003795  // Always false when read barriers are enabled: the checks below avoid read barriers (for
3796  // performance and code size), which can produce false negatives, so the slow path must be
3797  // able to return for those non-fatal cases instead of always throwing.
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003798 if (!kEmitCompilerReadBarrier) {
3799 is_type_check_slow_path_fatal =
3800 (type_check_kind == TypeCheckKind::kExactCheck ||
3801 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3802 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3803 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3804 !instruction->CanThrowIntoCatchBlock();
3805 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003806 SlowPathCodeARM64* type_check_slow_path =
3807 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3808 is_type_check_slow_path_fatal);
3809 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003810
Scott Wakeling97c72b72016-06-24 16:19:36 +01003811 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003812 // Avoid null check if we know obj is not null.
3813 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003814 __ Cbz(obj, &done);
3815 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003816
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003817 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003818 case TypeCheckKind::kExactCheck:
3819 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003820 // /* HeapReference<Class> */ temp = obj->klass_
3821 GenerateReferenceLoadTwoRegisters(instruction,
3822 temp_loc,
3823 obj_loc,
3824 class_offset,
3825 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003826 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003827
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003828 __ Cmp(temp, cls);
3829 // Jump to slow path for throwing the exception or doing a
3830 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003831 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003832 break;
3833 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003834
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003835 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003836 // /* HeapReference<Class> */ temp = obj->klass_
3837 GenerateReferenceLoadTwoRegisters(instruction,
3838 temp_loc,
3839 obj_loc,
3840 class_offset,
3841 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003842 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003843
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003844 // If the class is abstract, we eagerly fetch the super class of the
3845 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003846 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003847 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003848 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003849 GenerateReferenceLoadOneRegister(instruction,
3850 temp_loc,
3851 super_offset,
3852 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003853 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003854
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003855 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3856 // exception.
3857 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3858 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003859 __ Cmp(temp, cls);
3860 __ B(ne, &loop);
3861 break;
3862 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003863
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003864 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003865 // /* HeapReference<Class> */ temp = obj->klass_
3866 GenerateReferenceLoadTwoRegisters(instruction,
3867 temp_loc,
3868 obj_loc,
3869 class_offset,
3870 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003871 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003872
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003873 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003874 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003875 __ Bind(&loop);
3876 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003877 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003878
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003879 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003880 GenerateReferenceLoadOneRegister(instruction,
3881 temp_loc,
3882 super_offset,
3883 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003884 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003885
3886 // If the class reference currently in `temp` is not null, jump
3887      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003888 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003889 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003890 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003891 break;
3892 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003893
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003894 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003895 // /* HeapReference<Class> */ temp = obj->klass_
3896 GenerateReferenceLoadTwoRegisters(instruction,
3897 temp_loc,
3898 obj_loc,
3899 class_offset,
3900 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003901 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003902
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003903 // Do an exact check.
3904 __ Cmp(temp, cls);
3905 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003906
3907 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003908 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003909 GenerateReferenceLoadOneRegister(instruction,
3910 temp_loc,
3911 component_offset,
3912 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003913 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003914
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003915 // If the component type is null, jump to the slow path to throw the exception.
3916 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3917 // Otherwise, the object is indeed an array. Further check that this component type is not a
3918 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003919 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3920 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003921 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003922 break;
3923 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003924
Calin Juravle98893e12015-10-02 21:05:03 +01003925 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003926 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003927 //
3928 // We cannot directly call the CheckCast runtime entry point
3929 // without resorting to a type checking slow path here (i.e. by
3930      // calling InvokeRuntime directly), as it would require
3931      // assigning fixed registers for the inputs of this HCheckCast
3932 // instruction (following the runtime calling convention), which
3933 // might be cluttered by the potential first read barrier
3934 // emission at the beginning of this method.
3935 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003936 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003937 case TypeCheckKind::kInterfaceCheck: {
3938 // /* HeapReference<Class> */ temp = obj->klass_
3939 GenerateReferenceLoadTwoRegisters(instruction,
3940 temp_loc,
3941 obj_loc,
3942 class_offset,
3943 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003944 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003945
3946 // /* HeapReference<Class> */ temp = temp->iftable_
3947 GenerateReferenceLoadTwoRegisters(instruction,
3948 temp_loc,
3949 temp_loc,
3950 iftable_offset,
3951 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003952 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08003953 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003954 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
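      // `maybe_temp2_loc` now holds the iftable length. Each implemented interface occupies two
      // reference slots in the iftable (the interface class and its method array), which is why
      // the loop advances `temp` by 2 * kHeapReferenceSize and decrements the counter by 2 on
      // every iteration.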
Mathieu Chartier6beced42016-11-15 15:51:31 -08003955 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003956 vixl::aarch64::Label start_loop;
3957 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003958 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003959 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
3960 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003961 // Go to next interface.
3962 __ Add(temp, temp, 2 * kHeapReferenceSize);
3963 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003964 // Compare the classes and continue the loop if they do not match.
3965 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
3966 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003967 break;
3968 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003969 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003970 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003971
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003972 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003973}
3974
Alexandre Rames5319def2014-10-23 10:03:10 +01003975void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3976 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3977 locations->SetOut(Location::ConstantLocation(constant));
3978}
3979
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003980void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003981 // Will be generated at use site.
3982}
3983
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003984void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
3985 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3986 locations->SetOut(Location::ConstantLocation(constant));
3987}
3988
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003989void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003990 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003991}
3992
Calin Juravle175dc732015-08-25 15:42:32 +01003993void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3994 // The trampoline uses the same calling convention as dex calling conventions,
3995 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3996 // the method_idx.
3997 HandleInvoke(invoke);
3998}
3999
4000void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4001 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
4002}
4003
Alexandre Rames5319def2014-10-23 10:03:10 +01004004void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01004005 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01004006 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01004007}
4008
Alexandre Rames67555f72014-11-18 10:55:16 +00004009void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4010 HandleInvoke(invoke);
4011}
4012
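// Interface dispatch: load the receiver's class, follow its ImTable pointer, index it with the
// invoke's IMT index and branch-and-link through the resolved method's entry point. The method's
// dex index is materialized in ip1 beforehand, as the hidden argument expected by
// art_quick_imt_conflict_trampoline if the IMT slot turns out to be a conflict entry.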
4013void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4014 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004015 LocationSummary* locations = invoke->GetLocations();
4016 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004017 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00004018 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004019 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00004020
4021 // The register ip1 is required to be used for the hidden argument in
4022 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004023 MacroAssembler* masm = GetVIXLAssembler();
4024 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004025 scratch_scope.Exclude(ip1);
4026 __ Mov(ip1, invoke->GetDexMethodIndex());
4027
Artem Serov914d7a82017-02-07 14:33:49 +00004028 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004029 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004030 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004031 {
4032 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4033 // /* HeapReference<Class> */ temp = temp->klass_
4034 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4035 codegen_->MaybeRecordImplicitNullCheck(invoke);
4036 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004037 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004038 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004039 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004040 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004041 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004042 }
Artem Serov914d7a82017-02-07 14:33:49 +00004043
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004044 // Instead of simply (possibly) unpoisoning `temp` here, we should
4045 // emit a read barrier for the previous class reference load.
4046 // However this is not required in practice, as this is an
4047 // intermediate/temporary reference and because the current
4048 // concurrent copying collector keeps the from-space memory
4049  // intact/accessible until the end of the marking phase (though the
4050  // concurrent copying collector may not keep doing so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004051 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004052 __ Ldr(temp,
4053 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4054 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004055 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004056 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004057 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004058 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004059 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004060
4061 {
4062 // Ensure the pc position is recorded immediately after the `blr` instruction.
4063 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4064
4065 // lr();
4066 __ blr(lr);
4067 DCHECK(!codegen_->IsLeafMethod());
4068 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4069 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004070}
4071
4072void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004073 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
4074 if (intrinsic.TryDispatch(invoke)) {
4075 return;
4076 }
4077
Alexandre Rames67555f72014-11-18 10:55:16 +00004078 HandleInvoke(invoke);
4079}
4080
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004081void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004082 // Explicit clinit checks triggered by static invokes must have been pruned by
4083 // art::PrepareForRegisterAllocation.
4084 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004085
Andreas Gampe878d58c2015-01-15 23:24:00 -08004086 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
4087 if (intrinsic.TryDispatch(invoke)) {
4088 return;
4089 }
4090
Alexandre Rames67555f72014-11-18 10:55:16 +00004091 HandleInvoke(invoke);
4092}
4093
Andreas Gampe878d58c2015-01-15 23:24:00 -08004094static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4095 if (invoke->GetLocations()->Intrinsified()) {
4096 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4097 intrinsic.Dispatch(invoke);
4098 return true;
4099 }
4100 return false;
4101}
4102
Vladimir Markodc151b22015-10-15 18:02:30 +01004103HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4104 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004105 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004106 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004107 return desired_dispatch_info;
4108}
4109
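// Resolves the callee ArtMethod* according to the invoke's method load kind: the string-init
// entry point is read off the current thread, a recursive call reuses the current method, a
// direct address comes from the literal pool, kDexCachePcRelative emits an adrp/ldr pair with
// PC-relative patches, and kDexCacheViaMethod walks the caller's resolved-methods dex cache.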
TatWai Chongd8c052a2016-11-02 16:12:48 +08004110Location CodeGeneratorARM64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
4111 Location temp) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004112 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004113 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4114 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004115 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4116 uint32_t offset =
4117 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004118 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004119 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004120 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004121 }
Vladimir Marko58155012015-08-19 12:49:41 +00004122 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004123 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004124 break;
4125 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4126 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00004127 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00004128 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004129 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
4130 // Add ADRP with its PC-relative DexCache access patch.
Nicolas Geoffray5d37c152017-01-12 13:25:19 +00004131 const DexFile& dex_file = invoke->GetDexFileForPcRelativeDexCache();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004132 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004133 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004134 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004135 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004136 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004137 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004138 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
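      // Sketch of the emitted sequence (immediates are placeholders until the
      // linker patches them):
      //   adrp temp, <dex cache array page>
      //   ldr  temp, [temp, #<low 12 bits of element_offset>]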
Vladimir Marko58155012015-08-19 12:49:41 +00004139 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004140 }
Vladimir Marko58155012015-08-19 12:49:41 +00004141 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00004142 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004143 Register reg = XRegisterFrom(temp);
4144 Register method_reg;
4145 if (current_method.IsRegister()) {
4146 method_reg = XRegisterFrom(current_method);
4147 } else {
4148 DCHECK(invoke->GetLocations()->Intrinsified());
4149 DCHECK(!current_method.IsValid());
4150 method_reg = reg;
4151 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
4152 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00004153
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004154 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01004155 __ Ldr(reg.X(),
4156 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07004157 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004158 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01004159 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4160 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00004161 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
4162 break;
4163 }
4164 }
TatWai Chongd8c052a2016-11-02 16:12:48 +08004165 return callee_method;
4166}
4167
4168void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
4169 // All registers are assumed to be correctly set up.
4170 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00004171
4172 switch (invoke->GetCodePtrLocation()) {
4173 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4174 __ Bl(&frame_entry_label_);
4175 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004176 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4177 // LR = callee_method->entry_point_from_quick_compiled_code_;
4178 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004179 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004180 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004181 {
 4182 // To ensure that the pc position is recorded immediately after the `blr` instruction,
4183 // BLR must be the last instruction emitted in this function.
4184 // Recording the pc will occur right after returning from this function.
4185 ExactAssemblyScope eas(GetVIXLAssembler(),
4186 kInstructionSize,
4187 CodeBufferCheckScope::kExactSize);
4188 // lr()
4189 __ blr(lr);
4190 }
Vladimir Marko58155012015-08-19 12:49:41 +00004191 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004192 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004193
Andreas Gampe878d58c2015-01-15 23:24:00 -08004194 DCHECK(!IsLeafMethod());
4195}
4196
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004197void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004198 // Use the calling convention instead of the location of the receiver, as
4199 // intrinsics may have put the receiver in a different register. In the intrinsics
4200 // slow path, the arguments have been moved to the right place, so here we are
4201 // guaranteed that the receiver is the first register of the calling convention.
4202 InvokeDexCallingConvention calling_convention;
4203 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004204 Register temp = XRegisterFrom(temp_in);
4205 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4206 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4207 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004208 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004209
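  // Rough shape of the dispatch emitted below (ignoring poisoning and read
  // barriers):
  //   ldr wTemp, [receiver, #class_offset]   // load receiver->klass_
  //   ldr xTemp, [xTemp, #method_offset]     // load ArtMethod* from the vtable
  //   ldr lr,    [xTemp, #entry_point]       // load the quick entry point
  //   blr lr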
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004210 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004211
4212 {
4213 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
4214 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4215 // /* HeapReference<Class> */ temp = receiver->klass_
4216 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4217 MaybeRecordImplicitNullCheck(invoke);
4218 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004219 // Instead of simply (possibly) unpoisoning `temp` here, we should
 4220 // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004221 // intermediate/temporary reference and because the current
4222 // concurrent copying collector keeps the from-space memory
4223 // intact/accessible until the end of the marking phase (the
4224 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004225 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4226 // temp = temp->GetMethodAt(method_offset);
4227 __ Ldr(temp, MemOperand(temp, method_offset));
4228 // lr = temp->GetEntryPoint();
4229 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004230 {
 4231 // To ensure that the pc position is recorded immediately after the `blr` instruction,
4232 // BLR should be the last instruction emitted in this function.
4233 // Recording the pc will occur right after returning from this function.
4234 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4235 // lr();
4236 __ blr(lr);
4237 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004238}
4239
Orion Hodsonac141392017-01-13 11:53:47 +00004240void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4241 HandleInvoke(invoke);
4242}
4243
4244void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4245 codegen_->GenerateInvokePolymorphicCall(invoke);
4246}
4247
Scott Wakeling97c72b72016-06-24 16:19:36 +01004248vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
4249 const DexFile& dex_file,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004250 dex::StringIndex string_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004251 vixl::aarch64::Label* adrp_label) {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004252 return
4253 NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004254}
4255
Scott Wakeling97c72b72016-06-24 16:19:36 +01004256vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
4257 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004258 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004259 vixl::aarch64::Label* adrp_label) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08004260 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &pc_relative_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004261}
4262
Vladimir Marko1998cd02017-01-13 13:02:58 +00004263vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4264 const DexFile& dex_file,
4265 dex::TypeIndex type_index,
4266 vixl::aarch64::Label* adrp_label) {
4267 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
4268}
4269
Scott Wakeling97c72b72016-06-24 16:19:36 +01004270vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
4271 const DexFile& dex_file,
4272 uint32_t element_offset,
4273 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004274 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
4275}
4276
Scott Wakeling97c72b72016-06-24 16:19:36 +01004277vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
4278 const DexFile& dex_file,
4279 uint32_t offset_or_index,
4280 vixl::aarch64::Label* adrp_label,
4281 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004282 // Add a patch entry and return the label.
4283 patches->emplace_back(dex_file, offset_or_index);
4284 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004285 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004286 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4287 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4288 return label;
4289}
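// Typical use: the first call (with adrp_label == nullptr) creates the ADRP
// patch; the label it returns is then passed back in for the matching ADD/LDR
// patch so the linker can resolve both instructions against the same target.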
4290
Scott Wakeling97c72b72016-06-24 16:19:36 +01004291vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004292 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004293 return boot_image_string_patches_.GetOrCreate(
4294 StringReference(&dex_file, string_index),
4295 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4296}
4297
Scott Wakeling97c72b72016-06-24 16:19:36 +01004298vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Andreas Gampea5b09a62016-11-17 15:21:22 -08004299 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004300 return boot_image_type_patches_.GetOrCreate(
4301 TypeReference(&dex_file, type_index),
4302 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4303}
4304
Scott Wakeling97c72b72016-06-24 16:19:36 +01004305vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4306 uint64_t address) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004307 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
4308 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
4309 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
4310}
4311
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004312vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004313 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
4314 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
4315 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004316 return jit_string_patches_.GetOrCreate(
4317 StringReference(&dex_file, string_index),
4318 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4319}
4320
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004321vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004322 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
4323 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
4324 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004325 return jit_class_patches_.GetOrCreate(
4326 TypeReference(&dex_file, type_index),
4327 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4328}
4329
Vladimir Markoaad75c62016-10-03 08:46:48 +00004330void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4331 vixl::aarch64::Register reg) {
4332 DCHECK(reg.IsX());
4333 SingleEmissionCheckScope guard(GetVIXLAssembler());
4334 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004335 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004336}
4337
4338void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4339 vixl::aarch64::Register out,
4340 vixl::aarch64::Register base) {
4341 DCHECK(out.IsX());
4342 DCHECK(base.IsX());
4343 SingleEmissionCheckScope guard(GetVIXLAssembler());
4344 __ Bind(fixup_label);
4345 __ add(out, base, Operand(/* offset placeholder */ 0));
4346}
4347
4348void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4349 vixl::aarch64::Register out,
4350 vixl::aarch64::Register base) {
4351 DCHECK(base.IsX());
4352 SingleEmissionCheckScope guard(GetVIXLAssembler());
4353 __ Bind(fixup_label);
4354 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4355}
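// The three helpers above implement the standard AArch64 PC-relative addressing
// idiom: ADRP materializes the 4KiB page of the target, and ADD or LDR then
// applies the low 12 bits, with both instructions patched later through the
// labels recorded by NewPcRelativePatch().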
4356
4357template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4358inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4359 const ArenaDeque<PcRelativePatchInfo>& infos,
4360 ArenaVector<LinkerPatch>* linker_patches) {
4361 for (const PcRelativePatchInfo& info : infos) {
4362 linker_patches->push_back(Factory(info.label.GetLocation(),
4363 &info.target_dex_file,
4364 info.pc_insn_label->GetLocation(),
4365 info.offset_or_index));
4366 }
4367}
4368
Vladimir Marko58155012015-08-19 12:49:41 +00004369void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4370 DCHECK(linker_patches->empty());
4371 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004372 pc_relative_dex_cache_patches_.size() +
4373 boot_image_string_patches_.size() +
4374 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004375 boot_image_type_patches_.size() +
4376 pc_relative_type_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00004377 type_bss_entry_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004378 boot_image_address_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004379 linker_patches->reserve(size);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004380 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004381 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00004382 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004383 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004384 info.offset_or_index));
4385 }
4386 for (const auto& entry : boot_image_string_patches_) {
4387 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004388 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4389 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004390 target_string.dex_file,
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004391 target_string.string_index.index_));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004392 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004393 if (!GetCompilerOptions().IsBootImage()) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004394 DCHECK(pc_relative_type_patches_.empty());
Vladimir Markoaad75c62016-10-03 08:46:48 +00004395 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
4396 linker_patches);
4397 } else {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004398 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
4399 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004400 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
4401 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004402 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004403 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
4404 linker_patches);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004405 for (const auto& entry : boot_image_type_patches_) {
4406 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004407 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4408 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004409 target_type.dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004410 target_type.type_index.index_));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004411 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004412 for (const auto& entry : boot_image_address_patches_) {
4413 DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004414 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4415 linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
Vladimir Marko58155012015-08-19 12:49:41 +00004416 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004417 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004418}
4419
Scott Wakeling97c72b72016-06-24 16:19:36 +01004420vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004421 Uint32ToLiteralMap* map) {
4422 return map->GetOrCreate(
4423 value,
4424 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4425}
4426
Scott Wakeling97c72b72016-06-24 16:19:36 +01004427vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004428 return uint64_literals_.GetOrCreate(
4429 value,
4430 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004431}
4432
Scott Wakeling97c72b72016-06-24 16:19:36 +01004433vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00004434 MethodReference target_method,
4435 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004436 return map->GetOrCreate(
4437 target_method,
4438 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00004439}
4440
Andreas Gampe878d58c2015-01-15 23:24:00 -08004441void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004442 // Explicit clinit checks triggered by static invokes must have been pruned by
4443 // art::PrepareForRegisterAllocation.
4444 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004445
Andreas Gampe878d58c2015-01-15 23:24:00 -08004446 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4447 return;
4448 }
4449
Artem Serov914d7a82017-02-07 14:33:49 +00004450 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4451 // are no pools emitted.
4452 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004453 LocationSummary* locations = invoke->GetLocations();
4454 codegen_->GenerateStaticOrDirectCall(
4455 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00004456 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01004457}
4458
4459void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004460 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4461 return;
4462 }
4463
Artem Serov914d7a82017-02-07 14:33:49 +00004464 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
4465 // are no pools emitted.
4466 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004467 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004468 DCHECK(!codegen_->IsLeafMethod());
4469 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4470}
4471
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004472HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4473 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004474 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004475 case HLoadClass::LoadKind::kInvalid:
4476 LOG(FATAL) << "UNREACHABLE";
4477 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004478 case HLoadClass::LoadKind::kReferrersClass:
4479 break;
4480 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4481 DCHECK(!GetCompilerOptions().GetCompilePic());
4482 break;
4483 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
4484 DCHECK(GetCompilerOptions().GetCompilePic());
4485 break;
4486 case HLoadClass::LoadKind::kBootImageAddress:
4487 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004488 case HLoadClass::LoadKind::kBssEntry:
4489 DCHECK(!Runtime::Current()->UseJitCompilation());
4490 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004491 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004492 DCHECK(Runtime::Current()->UseJitCompilation());
4493 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004494 case HLoadClass::LoadKind::kDexCacheViaMethod:
4495 break;
4496 }
4497 return desired_class_load_kind;
4498}
4499
Alexandre Rames67555f72014-11-18 10:55:16 +00004500void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004501 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4502 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004503 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004504 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004505 cls,
4506 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004507 LocationFrom(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004508 return;
4509 }
Vladimir Marko41559982017-01-06 14:04:23 +00004510 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004511
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004512 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4513 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004514 ? LocationSummary::kCallOnSlowPath
4515 : LocationSummary::kNoCall;
4516 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004517 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004518 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004519 }
4520
Vladimir Marko41559982017-01-06 14:04:23 +00004521 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004522 locations->SetInAt(0, Location::RequiresRegister());
4523 }
4524 locations->SetOut(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004525}
4526
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004527// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4528// move.
4529void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004530 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4531 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
4532 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01004533 return;
4534 }
Vladimir Marko41559982017-01-06 14:04:23 +00004535 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004536
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004537 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004538 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004539
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004540 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4541 ? kWithoutReadBarrier
4542 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004543 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004544 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004545 case HLoadClass::LoadKind::kReferrersClass: {
4546 DCHECK(!cls->CanCallRuntime());
4547 DCHECK(!cls->MustGenerateClinitCheck());
4548 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4549 Register current_method = InputRegisterAt(cls, 0);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004550 GenerateGcRootFieldLoad(cls,
4551 out_loc,
4552 current_method,
4553 ArtMethod::DeclaringClassOffset().Int32Value(),
Roland Levillain00468f32016-10-27 18:02:48 +01004554 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004555 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004556 break;
4557 }
4558 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004559 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004560 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4561 cls->GetTypeIndex()));
4562 break;
4563 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004564 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004565 // Add ADRP with its PC-relative type patch.
4566 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004567 dex::TypeIndex type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004568 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004569 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004570 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004571 vixl::aarch64::Label* add_label =
4572 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004573 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
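      // Expected shape after linking (a sketch):
      //   adrp out, <page of the boot image Class>
      //   add  out, out, #<low 12 bits of the Class address>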
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004574 break;
4575 }
4576 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004577 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004578 uint32_t address = dchecked_integral_cast<uint32_t>(
4579 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
4580 DCHECK_NE(address, 0u);
4581 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004582 break;
4583 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004584 case HLoadClass::LoadKind::kBssEntry: {
4585 // Add ADRP with its PC-relative Class .bss entry patch.
4586 const DexFile& dex_file = cls->GetDexFile();
4587 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko1998cd02017-01-13 13:02:58 +00004588 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004589 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
4590 // Add LDR with its PC-relative Class patch.
4591 vixl::aarch64::Label* ldr_label =
Vladimir Marko1998cd02017-01-13 13:02:58 +00004592 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004593 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4594 GenerateGcRootFieldLoad(cls,
4595 cls->GetLocations()->Out(),
4596 out.X(),
4597 /* placeholder */ 0u,
4598 ldr_label,
4599 kCompilerReadBarrierOption);
4600 generate_null_check = true;
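      // The .bss slot is initially null, so the null check emitted at the end of
      // this visitor sends the first execution to the slow path to resolve the
      // type; once the entry has been filled with the resolved Class, later
      // executions stay on this fast path.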
4601 break;
4602 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004603 case HLoadClass::LoadKind::kJitTableAddress: {
4604 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4605 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004606 cls->GetClass()));
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004607 GenerateGcRootFieldLoad(cls,
4608 out_loc,
4609 out.X(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004610 /* offset */ 0,
Roland Levillain00468f32016-10-27 18:02:48 +01004611 /* fixup_label */ nullptr,
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004612 kCompilerReadBarrierOption);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004613 break;
4614 }
Vladimir Marko41559982017-01-06 14:04:23 +00004615 case HLoadClass::LoadKind::kDexCacheViaMethod:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004616 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00004617 LOG(FATAL) << "UNREACHABLE";
4618 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004619 }
4620
4621 if (generate_null_check || cls->MustGenerateClinitCheck()) {
4622 DCHECK(cls->CanCallRuntime());
4623 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
4624 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
4625 codegen_->AddSlowPath(slow_path);
4626 if (generate_null_check) {
4627 __ Cbz(out, slow_path->GetEntryLabel());
4628 }
4629 if (cls->MustGenerateClinitCheck()) {
4630 GenerateClassInitializationCheck(slow_path, out);
4631 } else {
4632 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004633 }
4634 }
4635}
4636
David Brazdilcb1c0552015-08-04 16:22:25 +01004637static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004638 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004639}
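// The returned operand addresses the pending-exception field of the current
// Thread through the reserved thread register (tr); LoadException reads it and
// ClearException below zeroes it with a plain store of wzr.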
4640
Alexandre Rames67555f72014-11-18 10:55:16 +00004641void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4642 LocationSummary* locations =
4643 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4644 locations->SetOut(Location::RequiresRegister());
4645}
4646
4647void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004648 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4649}
4650
4651void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4652 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4653}
4654
4655void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4656 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004657}
4658
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004659HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4660 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004661 switch (desired_string_load_kind) {
4662 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4663 DCHECK(!GetCompilerOptions().GetCompilePic());
4664 break;
4665 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4666 DCHECK(GetCompilerOptions().GetCompilePic());
4667 break;
4668 case HLoadString::LoadKind::kBootImageAddress:
4669 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004670 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01004671 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004672 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004673 case HLoadString::LoadKind::kJitTableAddress:
4674 DCHECK(Runtime::Current()->UseJitCompilation());
4675 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004676 case HLoadString::LoadKind::kDexCacheViaMethod:
4677 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004678 }
4679 return desired_string_load_kind;
4680}
4681
Alexandre Rames67555f72014-11-18 10:55:16 +00004682void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004683 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004684 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004685 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004686 InvokeRuntimeCallingConvention calling_convention;
4687 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4688 } else {
4689 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004690 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
4691 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4692 // Rely on the pResolveString and/or marking to save everything, including temps.
4693 RegisterSet caller_saves = RegisterSet::Empty();
4694 InvokeRuntimeCallingConvention calling_convention;
4695 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4696 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4697 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4698 Primitive::kPrimNot).GetCode());
4699 locations->SetCustomSlowPathCallerSaves(caller_saves);
4700 } else {
4701 // For non-Baker read barrier we have a temp-clobbering call.
4702 }
4703 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004704 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004705}
4706
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004707// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4708// move.
4709void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00004710 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004711 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004712
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004713 switch (load->GetLoadKind()) {
4714 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004715 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4716 load->GetStringIndex()));
4717 return; // No dex cache slow path.
4718 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004719 // Add ADRP with its PC-relative String patch.
4720 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004721 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004722 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004723 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004724 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004725 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004726 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004727 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004728 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004729 return; // No dex cache slow path.
4730 }
4731 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004732 uint32_t address = dchecked_integral_cast<uint32_t>(
4733 reinterpret_cast<uintptr_t>(load->GetString().Get()));
4734 DCHECK_NE(address, 0u);
4735 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004736 return; // No dex cache slow path.
4737 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004738 case HLoadString::LoadKind::kBssEntry: {
4739 // Add ADRP with its PC-relative String .bss entry patch.
4740 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004741 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004742 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004743 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4744 Register temp = temps.AcquireX();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004745 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004746 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004747 // Add LDR with its PC-relative String patch.
4748 vixl::aarch64::Label* ldr_label =
4749 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004750 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoaad75c62016-10-03 08:46:48 +00004751 GenerateGcRootFieldLoad(load,
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004752 out_loc,
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004753 temp,
Roland Levillain00468f32016-10-27 18:02:48 +01004754 /* offset placeholder */ 0u,
4755 ldr_label,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004756 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004757 SlowPathCodeARM64* slow_path =
4758 new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004759 codegen_->AddSlowPath(slow_path);
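      // On a null root the slow path calls pResolveString; it is given `temp`
      // and `adrp_label` so it can (as assumed here) recompute the .bss slot
      // address and store the resolved String back, keeping later executions on
      // the fast path.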
4760 __ Cbz(out.X(), slow_path->GetEntryLabel());
4761 __ Bind(slow_path->GetExitLabel());
4762 return;
4763 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004764 case HLoadString::LoadKind::kJitTableAddress: {
4765 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004766 load->GetStringIndex(),
4767 load->GetString()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004768 GenerateGcRootFieldLoad(load,
4769 out_loc,
4770 out.X(),
4771 /* offset */ 0,
4772 /* fixup_label */ nullptr,
4773 kCompilerReadBarrierOption);
4774 return;
4775 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004776 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004777 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004778 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004779
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004780 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004781 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004782 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004783 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004784 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4785 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004786}
4787
Alexandre Rames5319def2014-10-23 10:03:10 +01004788void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4789 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4790 locations->SetOut(Location::ConstantLocation(constant));
4791}
4792
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004793void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004794 // Will be generated at use site.
4795}
4796
Alexandre Rames67555f72014-11-18 10:55:16 +00004797void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4798 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004799 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004800 InvokeRuntimeCallingConvention calling_convention;
4801 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4802}
4803
4804void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004805 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004806 instruction,
4807 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004808 if (instruction->IsEnter()) {
4809 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4810 } else {
4811 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4812 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004813}
4814
Alexandre Rames42d641b2014-10-27 14:00:51 +00004815void LocationsBuilderARM64::VisitMul(HMul* mul) {
4816 LocationSummary* locations =
4817 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4818 switch (mul->GetResultType()) {
4819 case Primitive::kPrimInt:
4820 case Primitive::kPrimLong:
4821 locations->SetInAt(0, Location::RequiresRegister());
4822 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004823 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004824 break;
4825
4826 case Primitive::kPrimFloat:
4827 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004828 locations->SetInAt(0, Location::RequiresFpuRegister());
4829 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004830 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004831 break;
4832
4833 default:
4834 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4835 }
4836}
4837
4838void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4839 switch (mul->GetResultType()) {
4840 case Primitive::kPrimInt:
4841 case Primitive::kPrimLong:
4842 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4843 break;
4844
4845 case Primitive::kPrimFloat:
4846 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004847 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004848 break;
4849
4850 default:
4851 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4852 }
4853}
4854
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004855void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4856 LocationSummary* locations =
4857 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4858 switch (neg->GetResultType()) {
4859 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004860 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004861 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004862 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004863 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004864
4865 case Primitive::kPrimFloat:
4866 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004867 locations->SetInAt(0, Location::RequiresFpuRegister());
4868 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004869 break;
4870
4871 default:
4872 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4873 }
4874}
4875
4876void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4877 switch (neg->GetResultType()) {
4878 case Primitive::kPrimInt:
4879 case Primitive::kPrimLong:
4880 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4881 break;
4882
4883 case Primitive::kPrimFloat:
4884 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004885 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004886 break;
4887
4888 default:
4889 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4890 }
4891}
4892
4893void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4894 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004895 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004896 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004897 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004898 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4899 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004900}
4901
4902void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004903 // Note: if heap poisoning is enabled, the entry point takes care
4904 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00004905 QuickEntrypointEnum entrypoint =
4906 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
4907 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004908 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004909}
4910
Alexandre Rames5319def2014-10-23 10:03:10 +01004911void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4912 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004913 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004914 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004915 if (instruction->IsStringAlloc()) {
4916 locations->AddTemp(LocationFrom(kArtMethodRegister));
4917 } else {
4918 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004919 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004920 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4921}
4922
4923void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004924 // Note: if heap poisoning is enabled, the entry point takes care
4925 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004926 if (instruction->IsStringAlloc()) {
4927 // String is allocated through StringFactory. Call NewEmptyString entry point.
4928 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004929 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004930 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4931 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004932
4933 {
4934 // Ensure the pc position is recorded immediately after the `blr` instruction.
4935 ExactAssemblyScope eas(GetVIXLAssembler(),
4936 kInstructionSize,
4937 CodeBufferCheckScope::kExactSize);
4938 __ blr(lr);
4939 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4940 }
David Brazdil6de19382016-01-08 17:37:10 +00004941 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004942 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00004943 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00004944 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004945}
4946
4947void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4948 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004949 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004950 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004951}
4952
4953void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004954 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004955 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01004956 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01004957 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004958 break;
4959
4960 default:
4961 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
4962 }
4963}
4964
David Brazdil66d126e2015-04-03 16:02:44 +01004965void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
4966 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
4967 locations->SetInAt(0, Location::RequiresRegister());
4968 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4969}
4970
4971void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004972 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01004973}
4974
Alexandre Rames5319def2014-10-23 10:03:10 +01004975void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004976 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
4977 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01004978}
4979
Calin Juravle2ae48182016-03-16 14:05:09 +00004980void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
4981 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00004982 return;
4983 }
Artem Serov914d7a82017-02-07 14:33:49 +00004984 {
4985 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
4986 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4987 Location obj = instruction->GetLocations()->InAt(0);
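    // Loading into wzr from offset 0 of the object is a cheap way to touch the
    // reference: if it is null the load faults, and the fault handler (assumed
    // to be ART's implicit null check handler) turns the signal into a
    // NullPointerException at the pc recorded just below.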
4988 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
4989 RecordPcInfo(instruction, instruction->GetDexPc());
4990 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00004991}
4992
Calin Juravle2ae48182016-03-16 14:05:09 +00004993void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004994 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00004995 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01004996
4997 LocationSummary* locations = instruction->GetLocations();
4998 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00004999
5000 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005001}
5002
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005003void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005004 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005005}
5006
Alexandre Rames67555f72014-11-18 10:55:16 +00005007void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5008 HandleBinaryOp(instruction);
5009}
5010
5011void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5012 HandleBinaryOp(instruction);
5013}
5014
Alexandre Rames3e69f162014-12-10 10:36:50 +00005015void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5016 LOG(FATAL) << "Unreachable";
5017}
5018
5019void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
5020 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5021}
5022
Alexandre Rames5319def2014-10-23 10:03:10 +01005023void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
5024 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5025 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5026 if (location.IsStackSlot()) {
5027 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5028 } else if (location.IsDoubleStackSlot()) {
5029 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5030 }
5031 locations->SetOut(location);
5032}
5033
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005034void InstructionCodeGeneratorARM64::VisitParameterValue(
5035 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005036 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005037}
5038
5039void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5040 LocationSummary* locations =
5041 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005042 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005043}
5044
5045void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5046 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5047 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005048}
5049
5050void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
5051 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005052 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005053 locations->SetInAt(i, Location::Any());
5054 }
5055 locations->SetOut(Location::Any());
5056}
5057
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005058void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005059 LOG(FATAL) << "Unreachable";
5060}
5061
Serban Constantinescu02164b32014-11-13 14:05:07 +00005062void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005063 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005064 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005065 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
5066 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005067 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
5068
5069 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005070 case Primitive::kPrimInt:
5071 case Primitive::kPrimLong:
5072 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005073 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005074 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5075 break;
5076
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005077 case Primitive::kPrimFloat:
5078 case Primitive::kPrimDouble: {
5079 InvokeRuntimeCallingConvention calling_convention;
5080 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5081 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5082 locations->SetOut(calling_convention.GetReturnLocation(type));
5083
5084 break;
5085 }
5086
Serban Constantinescu02164b32014-11-13 14:05:07 +00005087 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005088 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005089 }
5090}
5091
5092void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
5093 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005094
Serban Constantinescu02164b32014-11-13 14:05:07 +00005095 switch (type) {
5096 case Primitive::kPrimInt:
5097 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08005098 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005099 break;
5100 }
5101
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005102 case Primitive::kPrimFloat:
5103 case Primitive::kPrimDouble: {
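// There is no AArch64 instruction for floating-point remainder, so it is computed by a runtime
// call: fmodf for float and fmod for double (see the entrypoint type checks below).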
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005104 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
5105 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005106 if (type == Primitive::kPrimFloat) {
5107 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5108 } else {
5109 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5110 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005111 break;
5112 }
5113
Serban Constantinescu02164b32014-11-13 14:05:07 +00005114 default:
5115 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005116 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005117 }
5118}
5119
Calin Juravle27df7582015-04-17 19:12:31 +01005120void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5121 memory_barrier->SetLocations(nullptr);
5122}
5123
5124void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005125 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005126}
5127
Alexandre Rames5319def2014-10-23 10:03:10 +01005128void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
5129 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5130 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005131 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005132}
5133
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005134void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005135 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005136}
5137
5138void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5139 instruction->SetLocations(nullptr);
5140}
5141
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005142void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005143 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005144}
5145
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005146void LocationsBuilderARM64::VisitRor(HRor* ror) {
5147 HandleBinaryOp(ror);
5148}
5149
5150void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5151 HandleBinaryOp(ror);
5152}
5153
Serban Constantinescu02164b32014-11-13 14:05:07 +00005154void LocationsBuilderARM64::VisitShl(HShl* shl) {
5155 HandleShift(shl);
5156}
5157
5158void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5159 HandleShift(shl);
5160}
5161
5162void LocationsBuilderARM64::VisitShr(HShr* shr) {
5163 HandleShift(shr);
5164}
5165
5166void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5167 HandleShift(shr);
5168}
5169
Alexandre Rames5319def2014-10-23 10:03:10 +01005170void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005171 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005172}
5173
5174void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005175 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005176}
5177
Alexandre Rames67555f72014-11-18 10:55:16 +00005178void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005179 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00005180}
5181
5182void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005183 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005184}
5185
5186void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005187 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005188}
5189
Alexandre Rames67555f72014-11-18 10:55:16 +00005190void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005191 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005192}
5193
Calin Juravlee460d1d2015-09-29 04:52:17 +01005194void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5195 HUnresolvedInstanceFieldGet* instruction) {
5196 FieldAccessCallingConventionARM64 calling_convention;
5197 codegen_->CreateUnresolvedFieldLocationSummary(
5198 instruction, instruction->GetFieldType(), calling_convention);
5199}
5200
5201void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5202 HUnresolvedInstanceFieldGet* instruction) {
5203 FieldAccessCallingConventionARM64 calling_convention;
5204 codegen_->GenerateUnresolvedFieldAccess(instruction,
5205 instruction->GetFieldType(),
5206 instruction->GetFieldIndex(),
5207 instruction->GetDexPc(),
5208 calling_convention);
5209}
5210
5211void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5212 HUnresolvedInstanceFieldSet* instruction) {
5213 FieldAccessCallingConventionARM64 calling_convention;
5214 codegen_->CreateUnresolvedFieldLocationSummary(
5215 instruction, instruction->GetFieldType(), calling_convention);
5216}
5217
5218void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5219 HUnresolvedInstanceFieldSet* instruction) {
5220 FieldAccessCallingConventionARM64 calling_convention;
5221 codegen_->GenerateUnresolvedFieldAccess(instruction,
5222 instruction->GetFieldType(),
5223 instruction->GetFieldIndex(),
5224 instruction->GetDexPc(),
5225 calling_convention);
5226}
5227
5228void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5229 HUnresolvedStaticFieldGet* instruction) {
5230 FieldAccessCallingConventionARM64 calling_convention;
5231 codegen_->CreateUnresolvedFieldLocationSummary(
5232 instruction, instruction->GetFieldType(), calling_convention);
5233}
5234
5235void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5236 HUnresolvedStaticFieldGet* instruction) {
5237 FieldAccessCallingConventionARM64 calling_convention;
5238 codegen_->GenerateUnresolvedFieldAccess(instruction,
5239 instruction->GetFieldType(),
5240 instruction->GetFieldIndex(),
5241 instruction->GetDexPc(),
5242 calling_convention);
5243}
5244
5245void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5246 HUnresolvedStaticFieldSet* instruction) {
5247 FieldAccessCallingConventionARM64 calling_convention;
5248 codegen_->CreateUnresolvedFieldLocationSummary(
5249 instruction, instruction->GetFieldType(), calling_convention);
5250}
5251
5252void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5253 HUnresolvedStaticFieldSet* instruction) {
5254 FieldAccessCallingConventionARM64 calling_convention;
5255 codegen_->GenerateUnresolvedFieldAccess(instruction,
5256 instruction->GetFieldType(),
5257 instruction->GetFieldIndex(),
5258 instruction->GetDexPc(),
5259 calling_convention);
5260}
5261
Alexandre Rames5319def2014-10-23 10:03:10 +01005262void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005263 LocationSummary* locations =
5264 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005265 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Alexandre Rames5319def2014-10-23 10:03:10 +01005266}
5267
5268void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005269 HBasicBlock* block = instruction->GetBlock();
5270 if (block->GetLoopInformation() != nullptr) {
5271 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5272 // The back edge will generate the suspend check.
5273 return;
5274 }
5275 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5276 // The goto will generate the suspend check.
5277 return;
5278 }
5279 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01005280}
5281
Alexandre Rames67555f72014-11-18 10:55:16 +00005282void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
5283 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005284 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005285 InvokeRuntimeCallingConvention calling_convention;
5286 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5287}
5288
5289void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005290 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005291 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005292}
5293
5294void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
5295 LocationSummary* locations =
5296 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
5297 Primitive::Type input_type = conversion->GetInputType();
5298 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00005299 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00005300 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
5301 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
5302 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5303 }
5304
Alexandre Rames542361f2015-01-29 16:57:31 +00005305 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005306 locations->SetInAt(0, Location::RequiresFpuRegister());
5307 } else {
5308 locations->SetInAt(0, Location::RequiresRegister());
5309 }
5310
Alexandre Rames542361f2015-01-29 16:57:31 +00005311 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005312 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5313 } else {
5314 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5315 }
5316}
5317
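// A few illustrative lowerings of the integral conversions handled below: long->int is a plain
// W-register move; int->char zero-extends the low 16 bits with Ubfx; int->byte and int->short
// sign-extend with Sbfx; int->long sign-extends the low 32 bits into an X register with Sbfx.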
5318void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
5319 Primitive::Type result_type = conversion->GetResultType();
5320 Primitive::Type input_type = conversion->GetInputType();
5321
5322 DCHECK_NE(input_type, result_type);
5323
Alexandre Rames542361f2015-01-29 16:57:31 +00005324 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005325 int result_size = Primitive::ComponentSize(result_type);
5326 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00005327 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005328 Register output = OutputRegister(conversion);
5329 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00005330 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01005331 // 'int' values are used directly as W registers, discarding the top
5332 // bits, so we don't need to sign-extend and can just perform a move.
5333 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
5334 // top 32 bits of the target register. We theoretically could leave those
5335 // bits unchanged, but we would have to make sure that no code uses a
5336 // 32-bit input value as a 64-bit value assuming that the top 32 bits are

5337 // zero.
5338 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00005339 } else if (result_type == Primitive::kPrimChar ||
5340 (input_type == Primitive::kPrimChar && input_size < result_size)) {
5341 __ Ubfx(output,
5342 output.IsX() ? source.X() : source.W(),
5343 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005344 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00005345 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005346 }
Alexandre Rames542361f2015-01-29 16:57:31 +00005347 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005348 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005349 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005350 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
5351 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005352 } else if (Primitive::IsFloatingPointType(result_type) &&
5353 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005354 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
5355 } else {
5356 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
5357 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005358 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00005359}
Alexandre Rames67555f72014-11-18 10:55:16 +00005360
Serban Constantinescu02164b32014-11-13 14:05:07 +00005361void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
5362 HandleShift(ushr);
5363}
5364
5365void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
5366 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00005367}
5368
5369void LocationsBuilderARM64::VisitXor(HXor* instruction) {
5370 HandleBinaryOp(instruction);
5371}
5372
5373void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
5374 HandleBinaryOp(instruction);
5375}
5376
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005377void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005378 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005379 LOG(FATAL) << "Unreachable";
5380}
5381
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005382void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005383 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005384 LOG(FATAL) << "Unreachable";
5385}
5386
Mark Mendellfe57faa2015-09-18 09:26:15 -04005387// Simple implementation of packed switch - generate cascaded compare/jumps.
5388void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5389 LocationSummary* locations =
5390 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
5391 locations->SetInAt(0, Location::RequiresRegister());
5392}
5393
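// For larger switches, when the table can still be addressed with Adr, a PC-relative jump table
// is used instead of cascaded compares. In outline, the dispatch sequence generated below is:
//   Sub index, value, #lower_bound                // only if lower_bound != 0
//   Cmp index, #num_entries
//   B.hs default_block
//   Adr table_base, <table start>
//   Ldr w_offset, [table_base, index, uxtw #2]    // one 32-bit offset per case
//   Add target, table_base, w_offset, sxtw
//   Br  target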
5394void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5395 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08005396 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04005397 Register value_reg = InputRegisterAt(switch_instr, 0);
5398 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
5399
Zheng Xu3927c8b2015-11-18 17:46:25 +08005400 // Assume at most an average of 16 assembly instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005401 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08005402 // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph
5403 // to make sure we do not emit a jump table when its ADR target may be out of range.
5404 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
5405 // ranges and emit the tables only as required.
5406 static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
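// With kInstructionSize == 4 bytes, kMaxExpectedSizePerHInstruction is 64 bytes and this
// threshold works out to 1 MB / 64 = 16384 HIRs; it is a conservative estimate, not an exact
// range guarantee.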
Mark Mendellfe57faa2015-09-18 09:26:15 -04005407
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005408 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08005409 // Current instruction id is an upper bound of the number of HIRs in the graph.
5410 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
5411 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005412 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5413 Register temp = temps.AcquireW();
5414 __ Subs(temp, value_reg, Operand(lower_bound));
5415
Zheng Xu3927c8b2015-11-18 17:46:25 +08005416 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005417 // Jump to successors[0] if value == lower_bound.
5418 __ B(eq, codegen_->GetLabelOf(successors[0]));
5419 int32_t last_index = 0;
5420 for (; num_entries - last_index > 2; last_index += 2) {
5421 __ Subs(temp, temp, Operand(2));
5422 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
5423 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
5424 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
5425 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
5426 }
5427 if (num_entries - last_index == 2) {
5428 // Handle the last case value not covered by the pair-wise loop above.
5429 __ Cmp(temp, Operand(1));
5430 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08005431 }
5432
5433 // And the default for any other value.
5434 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
5435 __ B(codegen_->GetLabelOf(default_block));
5436 }
5437 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01005438 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08005439
5440 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5441
5442 // The instructions below should use at most one blocked register. Since there are two blocked
5443 // registers, we are free to block one here.
5444 Register temp_w = temps.AcquireW();
5445 Register index;
5446 // Remove the bias.
5447 if (lower_bound != 0) {
5448 index = temp_w;
5449 __ Sub(index, value_reg, Operand(lower_bound));
5450 } else {
5451 index = value_reg;
5452 }
5453
5454 // Jump to the default block if the index is out of range.
5455 __ Cmp(index, Operand(num_entries));
5456 __ B(hs, codegen_->GetLabelOf(default_block));
5457
5458 // In the current VIXL implementation, encoding the immediate value for Adr does not require
5459 // any blocked registers, so we are free to use both VIXL blocked registers to reduce the
5460 // register pressure.
5461 Register table_base = temps.AcquireX();
5462 // Load jump offset from the table.
5463 __ Adr(table_base, jump_table->GetTableStartLabel());
5464 Register jump_offset = temp_w;
5465 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
5466
5467 // Jump to the target block by branching to table_base (PC-relative) + offset.
5468 Register target_address = table_base;
5469 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
5470 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005471 }
5472}
5473
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005474void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
5475 HInstruction* instruction,
5476 Location out,
5477 uint32_t offset,
5478 Location maybe_temp,
5479 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005480 Primitive::Type type = Primitive::kPrimNot;
5481 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005482 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005483 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005484 Register temp_reg = RegisterFrom(maybe_temp, type);
5485 if (kUseBakerReadBarrier) {
5486 // Load with fast path based Baker's read barrier.
5487 // /* HeapReference<Object> */ out = *(out + offset)
5488 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5489 out,
5490 out_reg,
5491 offset,
5492 temp_reg,
5493 /* needs_null_check */ false,
5494 /* use_load_acquire */ false);
5495 } else {
5496 // Load with slow path based read barrier.
5497 // Save the value of `out` into `maybe_temp` before overwriting it
5498 // in the following move operation, as we will need it for the
5499 // read barrier below.
5500 __ Mov(temp_reg, out_reg);
5501 // /* HeapReference<Object> */ out = *(out + offset)
5502 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5503 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5504 }
5505 } else {
5506 // Plain load with no read barrier.
5507 // /* HeapReference<Object> */ out = *(out + offset)
5508 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5509 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5510 }
5511}
5512
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005513void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
5514 HInstruction* instruction,
5515 Location out,
5516 Location obj,
5517 uint32_t offset,
5518 Location maybe_temp,
5519 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005520 Primitive::Type type = Primitive::kPrimNot;
5521 Register out_reg = RegisterFrom(out, type);
5522 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005523 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005524 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005525 if (kUseBakerReadBarrier) {
5526 // Load with fast path based Baker's read barrier.
5527 Register temp_reg = RegisterFrom(maybe_temp, type);
5528 // /* HeapReference<Object> */ out = *(obj + offset)
5529 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5530 out,
5531 obj_reg,
5532 offset,
5533 temp_reg,
5534 /* needs_null_check */ false,
5535 /* use_load_acquire */ false);
5536 } else {
5537 // Load with slow path based read barrier.
5538 // /* HeapReference<Object> */ out = *(obj + offset)
5539 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5540 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5541 }
5542 } else {
5543 // Plain load with no read barrier.
5544 // /* HeapReference<Object> */ out = *(obj + offset)
5545 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5546 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5547 }
5548}
5549
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005550void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(
5551 HInstruction* instruction,
5552 Location root,
5553 Register obj,
5554 uint32_t offset,
5555 vixl::aarch64::Label* fixup_label,
5556 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005557 DCHECK(fixup_label == nullptr || offset == 0u);
Roland Levillain44015862016-01-22 11:47:17 +00005558 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005559 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005560 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005561 if (kUseBakerReadBarrier) {
5562 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
5563 // Baker's read barrier are used:
5564 //
5565 // root = obj.field;
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005566 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
5567 // if (temp != null) {
5568 // root = temp(root)
Roland Levillain44015862016-01-22 11:47:17 +00005569 // }
5570
5571 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005572 if (fixup_label == nullptr) {
5573 __ Ldr(root_reg, MemOperand(obj, offset));
5574 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005575 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005576 }
Roland Levillain44015862016-01-22 11:47:17 +00005577 static_assert(
5578 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5579 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5580 "have different sizes.");
5581 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5582 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5583 "have different sizes.");
5584
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005585 Register temp = lr;
Roland Levillain44015862016-01-22 11:47:17 +00005586
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005587 // Slow path marking the GC root `root`. The entrypoint will already be loaded in `temp`.
5588 SlowPathCodeARM64* slow_path =
5589 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction,
5590 root,
5591 LocationFrom(temp));
5592 codegen_->AddSlowPath(slow_path);
5593 const int32_t entry_point_offset =
5594 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(root.reg());
5595 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
5596 // Loading the entrypoint does not require a load acquire since it is only changed when
5597 // threads are suspended or running a checkpoint.
5598 __ Ldr(temp, MemOperand(tr, entry_point_offset));
5599 // The entrypoint is null when the GC is not marking; this saves one load compared to
5600 // checking GetIsGcMarking.
Roland Levillain44015862016-01-22 11:47:17 +00005601 __ Cbnz(temp, slow_path->GetEntryLabel());
5602 __ Bind(slow_path->GetExitLabel());
5603 } else {
5604 // GC root loaded through a slow path for read barriers other
5605 // than Baker's.
5606 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005607 if (fixup_label == nullptr) {
5608 __ Add(root_reg.X(), obj.X(), offset);
5609 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005610 codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005611 }
Roland Levillain44015862016-01-22 11:47:17 +00005612 // /* mirror::Object* */ root = root->Read()
5613 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5614 }
5615 } else {
5616 // Plain GC root load with no read barrier.
5617 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005618 if (fixup_label == nullptr) {
5619 __ Ldr(root_reg, MemOperand(obj, offset));
5620 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005621 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005622 }
Roland Levillain44015862016-01-22 11:47:17 +00005623 // Note that GC roots are not affected by heap poisoning, thus we
5624 // do not have to unpoison `root_reg` here.
5625 }
5626}
5627
5628void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5629 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005630 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005631 uint32_t offset,
5632 Register temp,
5633 bool needs_null_check,
5634 bool use_load_acquire) {
5635 DCHECK(kEmitCompilerReadBarrier);
5636 DCHECK(kUseBakerReadBarrier);
5637
5638 // /* HeapReference<Object> */ ref = *(obj + offset)
5639 Location no_index = Location::NoLocation();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005640 size_t no_scale_factor = 0u;
Roland Levillainbfea3352016-06-23 13:48:47 +01005641 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5642 ref,
5643 obj,
5644 offset,
5645 no_index,
5646 no_scale_factor,
5647 temp,
5648 needs_null_check,
5649 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005650}
5651
5652void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5653 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005654 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005655 uint32_t data_offset,
5656 Location index,
5657 Register temp,
5658 bool needs_null_check) {
5659 DCHECK(kEmitCompilerReadBarrier);
5660 DCHECK(kUseBakerReadBarrier);
5661
5662 // Array cells are never volatile variables; therefore, array loads
5663 // never use Load-Acquire instructions on ARM64.
5664 const bool use_load_acquire = false;
5665
Roland Levillainbfea3352016-06-23 13:48:47 +01005666 static_assert(
5667 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5668 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005669 // /* HeapReference<Object> */ ref =
5670 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005671 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5672 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5673 ref,
5674 obj,
5675 data_offset,
5676 index,
5677 scale_factor,
5678 temp,
5679 needs_null_check,
5680 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005681}
5682
5683void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5684 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005685 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005686 uint32_t offset,
5687 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005688 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005689 Register temp,
5690 bool needs_null_check,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005691 bool use_load_acquire,
5692 bool always_update_field) {
Roland Levillain44015862016-01-22 11:47:17 +00005693 DCHECK(kEmitCompilerReadBarrier);
5694 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005695 // If we are emitting an array load, we should not be using a
5696 // Load Acquire instruction. In other words:
5697 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5698 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005699
5700 MacroAssembler* masm = GetVIXLAssembler();
5701 UseScratchRegisterScope temps(masm);
5702
5703 // In slow path based read barriers, the read barrier call is
5704 // inserted after the original load. However, in fast path based
5705 // Baker's read barriers, we need to perform the load of
5706 // mirror::Object::monitor_ *before* the original reference load.
5707 // This load-load ordering is required by the read barrier.
5708 // The fast path/slow path (for Baker's algorithm) should look like:
5709 //
5710 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
5711 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5712 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07005713 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain44015862016-01-22 11:47:17 +00005714 // if (is_gray) {
5715 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
5716 // }
5717 //
5718 // Note: the original implementation in ReadBarrier::Barrier is
5719 // slightly more complex as it performs additional checks that we do
5720 // not do here for performance reasons.
5721
5722 Primitive::Type type = Primitive::kPrimNot;
5723 Register ref_reg = RegisterFrom(ref, type);
5724 DCHECK(obj.IsW());
5725 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
5726
Artem Serov914d7a82017-02-07 14:33:49 +00005727 {
5728 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
5729 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5730 // /* int32_t */ monitor = obj->monitor_
5731 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5732 if (needs_null_check) {
5733 MaybeRecordImplicitNullCheck(instruction);
5734 }
Roland Levillain44015862016-01-22 11:47:17 +00005735 }
5736 // /* LockWord */ lock_word = LockWord(monitor)
5737 static_assert(sizeof(LockWord) == sizeof(int32_t),
5738 "art::LockWord and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005739
Vladimir Marko877a0332016-07-11 19:30:56 +01005740 // Introduce a dependency on the lock_word including rb_state,
5741 // to prevent load-load reordering, and without using
Roland Levillain44015862016-01-22 11:47:17 +00005742 // a memory barrier (which would be more expensive).
Roland Levillain0b671c02016-08-19 12:02:34 +01005743 // `obj` is unchanged by this operation, but its value now depends
5744 // on `temp`.
Vladimir Marko877a0332016-07-11 19:30:56 +01005745 __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
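// (The shifted operand is zero: the 32-bit monitor load zero-extends into temp.X(), so LSR #32
// produces 0 and the add leaves `obj` unchanged while creating the data dependency.)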
Roland Levillain44015862016-01-22 11:47:17 +00005746
5747 // The actual reference load.
5748 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005749 // Load types involving an "index": ArrayGet,
5750 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
5751 // intrinsics.
Roland Levillainbfea3352016-06-23 13:48:47 +01005752 if (use_load_acquire) {
5753 // UnsafeGetObjectVolatile intrinsic case.
5754 // Register `index` is not an index in an object array, but an
5755 // offset to an object reference field within object `obj`.
5756 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
5757 DCHECK(instruction->GetLocations()->Intrinsified());
5758 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
5759 << instruction->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005760 DCHECK_EQ(offset, 0u);
5761 DCHECK_EQ(scale_factor, 0u);
5762 DCHECK_EQ(needs_null_check, 0u);
Roland Levillainbfea3352016-06-23 13:48:47 +01005763 // /* HeapReference<Object> */ ref = *(obj + index)
5764 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
5765 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005766 } else {
Roland Levillainbfea3352016-06-23 13:48:47 +01005767 // ArrayGet and UnsafeGetObject intrinsics cases.
5768 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
5769 if (index.IsConstant()) {
5770 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
5771 Load(type, ref_reg, HeapOperand(obj, computed_offset));
5772 } else {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005773 Register temp3 = temps.AcquireW();
5774 __ Add(temp3, obj, offset);
5775 Load(type, ref_reg, HeapOperand(temp3, XRegisterFrom(index), LSL, scale_factor));
5776 temps.Release(temp3);
Roland Levillainbfea3352016-06-23 13:48:47 +01005777 }
Roland Levillain44015862016-01-22 11:47:17 +00005778 }
Roland Levillain44015862016-01-22 11:47:17 +00005779 } else {
5780 // /* HeapReference<Object> */ ref = *(obj + offset)
5781 MemOperand field = HeapOperand(obj, offset);
5782 if (use_load_acquire) {
5783 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
5784 } else {
5785 Load(type, ref_reg, field);
5786 }
5787 }
5788
5789 // Object* ref = ref_addr->AsMirrorPtr()
5790 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
5791
Vladimir Marko953437b2016-08-24 08:30:46 +00005792 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005793 SlowPathCodeARM64* slow_path;
5794 if (always_update_field) {
5795 // ReadBarrierMarkAndUpdateFieldSlowPathARM64 only supports
5796 // address of the form `obj + field_offset`, where `obj` is a
5797 // register and `field_offset` is a register. Thus `offset` and
5798 // `scale_factor` above are expected to be zero in this code path.
5799 DCHECK_EQ(offset, 0u);
5800 DCHECK_EQ(scale_factor, 0u); /* "times 1" */
5801 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathARM64(
5802 instruction, ref, obj, /* field_offset */ index, temp);
5803 } else {
5804 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
5805 }
Roland Levillain44015862016-01-22 11:47:17 +00005806 AddSlowPath(slow_path);
5807
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07005808 // if (rb_state == ReadBarrier::GrayState())
Roland Levillain44015862016-01-22 11:47:17 +00005809 // ref = ReadBarrier::Mark(ref);
Vladimir Marko877a0332016-07-11 19:30:56 +01005810 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07005811 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
5812 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko877a0332016-07-11 19:30:56 +01005813 __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
Roland Levillain44015862016-01-22 11:47:17 +00005814 __ Bind(slow_path->GetExitLabel());
5815}
5816
5817void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5818 Location out,
5819 Location ref,
5820 Location obj,
5821 uint32_t offset,
5822 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005823 DCHECK(kEmitCompilerReadBarrier);
5824
Roland Levillain44015862016-01-22 11:47:17 +00005825 // Insert a slow path based read barrier *after* the reference load.
5826 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005827 // If heap poisoning is enabled, the unpoisoning of the loaded
5828 // reference will be carried out by the runtime within the slow
5829 // path.
5830 //
5831 // Note that `ref` currently does not get unpoisoned (when heap
5832 // poisoning is enabled), which is alright as the `ref` argument is
5833 // not used by the artReadBarrierSlow entry point.
5834 //
5835 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5836 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5837 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5838 AddSlowPath(slow_path);
5839
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005840 __ B(slow_path->GetEntryLabel());
5841 __ Bind(slow_path->GetExitLabel());
5842}
5843
Roland Levillain44015862016-01-22 11:47:17 +00005844void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5845 Location out,
5846 Location ref,
5847 Location obj,
5848 uint32_t offset,
5849 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005850 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005851 // Baker's read barriers shall be handled by the fast path
5852 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5853 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005854 // If heap poisoning is enabled, unpoisoning will be taken care of
5855 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005856 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005857 } else if (kPoisonHeapReferences) {
5858 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5859 }
5860}
5861
Roland Levillain44015862016-01-22 11:47:17 +00005862void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5863 Location out,
5864 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005865 DCHECK(kEmitCompilerReadBarrier);
5866
Roland Levillain44015862016-01-22 11:47:17 +00005867 // Insert a slow path based read barrier *after* the GC root load.
5868 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005869 // Note that GC roots are not affected by heap poisoning, so we do
5870 // not need to do anything special for this here.
5871 SlowPathCodeARM64* slow_path =
5872 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5873 AddSlowPath(slow_path);
5874
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005875 __ B(slow_path->GetEntryLabel());
5876 __ Bind(slow_path->GetExitLabel());
5877}
5878
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005879void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5880 LocationSummary* locations =
5881 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5882 locations->SetInAt(0, Location::RequiresRegister());
5883 locations->SetOut(Location::RequiresRegister());
5884}
5885
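// In the vtable case the entry is embedded in the Class object, so one load at a per-index
// offset suffices; in the IMT case the Class holds only a pointer to the ImTable, so the code
// below first loads that pointer and then loads the entry at the index's offset within it.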
5886void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5887 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00005888 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005889 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005890 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005891 __ Ldr(XRegisterFrom(locations->Out()),
5892 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005893 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005894 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00005895 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005896 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
5897 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005898 __ Ldr(XRegisterFrom(locations->Out()),
5899 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005900 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005901}
5902
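// Each JIT string/class literal is a 32-bit placeholder in the generated code; it is patched
// below with the address (checked to fit in 32 bits) of the corresponding GcRoot<> slot inside
// the `roots_data` table.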
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005903static void PatchJitRootUse(uint8_t* code,
5904 const uint8_t* roots_data,
5905 vixl::aarch64::Literal<uint32_t>* literal,
5906 uint64_t index_in_table) {
5907 uint32_t literal_offset = literal->GetOffset();
5908 uintptr_t address =
5909 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
5910 uint8_t* data = code + literal_offset;
5911 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
5912}
5913
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005914void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
5915 for (const auto& entry : jit_string_patches_) {
5916 const auto& it = jit_string_roots_.find(entry.first);
5917 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005918 PatchJitRootUse(code, roots_data, entry.second, it->second);
5919 }
5920 for (const auto& entry : jit_class_patches_) {
5921 const auto& it = jit_class_roots_.find(entry.first);
5922 DCHECK(it != jit_class_roots_.end());
5923 PatchJitRootUse(code, roots_data, entry.second, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005924 }
5925}
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005926
Alexandre Rames67555f72014-11-18 10:55:16 +00005927#undef __
5928#undef QUICK_ENTRY_POINT
5929
Alexandre Rames5319def2014-10-23 10:03:10 +01005930} // namespace arm64
5931} // namespace art