/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data for a small num_entries.
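// Illustrative arithmetic at the threshold (added note, not from the original comment): for
// num_entries == 7 the compare/jump sequence costs about 1.5 * 7 + 3 ~= 14 instructions, while
// the jump table costs 7 instructions plus 7 32-bit literals, i.e. roughly the same 14 words of
// code/data; above this point the jump table is expected to be the denser encoding.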
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// A reference load (except an object array load) uses LDR Wt, [Xn, #offset], which can handle
// offsets < 16KiB. For offsets >= 16KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
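// Added background on the 16KiB bound: the 32-bit LDR (immediate) encoding scales an unsigned
// 12-bit field by the 4-byte access size, so the largest directly encodable offset is
// 4095 * 4 = 16380 bytes. A sketch of the two-instruction split for a far offset (register
// names illustrative):
//   Add(temp.X(), base.X(), offset & ~(kReferenceLoadMinFarOffset - 1u));  // High bits.
//   Ldr(ref.W(), MemOperand(temp.X(), offset & (kReferenceLoadMinFarOffset - 1u)));  // Low bits.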
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;

// Some instructions have special requirements for a temporary, for example
// LoadClass/kBssEntry and LoadString/kBssEntry for Baker read barrier require
// a temp that is not R0 (to avoid an extra move) and Baker read barrier field
// loads with large offsets need a fixed register to limit the number of link-time
// thunks we generate. For these and similar cases, we want to reserve a specific
// register that's neither callee-save nor an argument register. We choose x15.
inline Location FixedTempLocation() {
  return Location::RegisterLocation(x15.GetCode());
}

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
            RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
                         DataType::Type::kReference).GetCode());
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate the memory-access operand for saving/restoring live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

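  // Layout sketch, derived from the stores below (the register choices are illustrative): with
  // core spills {x20, x21} and FP spill {d8}, and no SIMD, a save produces
  //   [base + spill_offset]      : x20, x21   (core_spill_size == 16 bytes)
  //   [base + spill_offset + 16] : d8
  // i.e. the FP list always starts core_spill_size bytes after the core list.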
  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARM64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
      arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)),
                                  source,
                                  cls_->GetType());
    }
    if (must_do_clinit) {
      arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer space and that the generated jump table has the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
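
// A sketch of the expected consumer of this table (added note; the dispatch sequence lives in
// the PackedSwitch code generation, and the register choices here are illustrative): each entry
// is a signed 32-bit offset from `table_start_` to a successor's label, so dispatch is roughly
//   Adr(table_base, &table_start_);
//   Ldrsw(offset, MemOperand(table_base, index, UXTW, 2));
//   Add(target, table_base, offset);
//   Br(target);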

// Abstract base class for read barrier slow paths marking a reference
// `ref`.
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class ReadBarrierMarkSlowPathBaseARM64 : public SlowPathCodeARM64 {
 protected:
  ReadBarrierMarkSlowPathBaseARM64(HInstruction* instruction, Location ref, Location entrypoint)
      : SlowPathCodeARM64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathBaseARM64"; }

  // Generate assembly code calling the read barrier marking runtime
  // entry point (ReadBarrierMarkRegX).
  void GenerateReadBarrierMarkRuntimeCall(CodeGenerator* codegen) {
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
  }

  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathBaseARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). The field `obj.field` in the object `obj` holding
// this reference does not get updated by this slow path after marking
// (see LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
// below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class LoadReferenceWithBakerReadBarrierSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierSlowPathARM64(HInstruction* instruction,
                                                 Location ref,
                                                 Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 size_t scale_factor,
                                                 bool needs_null_check,
                                                 bool use_load_acquire,
                                                 Register temp,
                                                 Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // When using MaybeGenerateReadBarrierSlow, the read barrier call is
    // inserted after the original load. However, in fast path based
    // Baker's read barriers, we need to perform the load of
    // mirror::Object::monitor_ *before* the original reference load.
    // This load-load ordering is required by the read barrier.
    // The slow path (for Baker's algorithm) should look like:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //   }
    //
    // Note: the original implementation in ReadBarrier::Barrier is
    // slightly more complex as it performs additional checks that we do
    // not do here for performance reasons.

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));
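    // (Added note on why this is arithmetically a no-op: the lock word was loaded into the W
    // view of `temp_`, and 32-bit loads zero-extend into the X register, so `temp_.X() LSR #32`
    // is always zero; the ADD therefore leaves `obj_` unchanged while still making the address
    // of the following reference load depend on the lock word load.)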

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);

    __ B(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  Register obj_;
  // The offset, index and scale factor to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  size_t scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // Should this reference load use Load-Acquire semantics?
  bool use_load_acquire_;
  // A temporary register used to hold the lock word of `obj_`.
  Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierSlowPathARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). If needed, this slow path also atomically updates
// the field `obj.field` in the object `obj` holding this reference
// after marking (contrary to
// LoadReferenceWithBakerReadBarrierSlowPathARM64 above, which never
// tries to update `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
    : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
      HInstruction* instruction,
      Location ref,
      Register obj,
      uint32_t offset,
      Location index,
      size_t scale_factor,
      bool needs_null_check,
      bool use_load_acquire,
      Register temp,
      Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());

    // This slow path is only used by the UnsafeCASObject intrinsic at the moment.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK_EQ(offset_, 0u);
    DCHECK_EQ(scale_factor_, 0u);
    DCHECK_EQ(use_load_acquire_, false);
    // The location of the offset of the marked reference field within `obj_`.
    Location field_offset = index_;
    DCHECK(field_offset.IsRegister()) << field_offset;

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // The implementation is similar to LoadReferenceWithBakerReadBarrierSlowPathARM64's:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     old_ref = ref;
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //     compareAndSwapObject(obj, field_offset, old_ref, ref);
    //   }

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());

    // Save the old value of the reference before marking it.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    GenerateReadBarrierMarkRuntimeCall(codegen);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, GetExitLabel());

988 // Update the the holder's field atomically. This may fail if
989 // mutator updates before us, but it's OK. This is achieved
990 // using a strong compare-and-set (CAS) operation with relaxed
991 // memory synchronization ordering, where the expected value is
992 // the old reference and the desired value is the new reference.
993
994 MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
995 UseScratchRegisterScope temps(masm);
996
997 // Convenience aliases.
998 Register base = obj_.W();
Roland Levillain54f869e2017-03-06 13:54:11 +0000999 Register offset = XRegisterFrom(field_offset);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001000 Register expected = temp_.W();
1001 Register value = ref_reg;
1002 Register tmp_ptr = temps.AcquireX(); // Pointer to actual memory.
1003 Register tmp_value = temps.AcquireW(); // Value in memory.
1004
1005 __ Add(tmp_ptr, base.X(), Operand(offset));
1006
1007 if (kPoisonHeapReferences) {
1008 arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
1009 if (value.Is(expected)) {
1010 // Do not poison `value`, as it is the same register as
1011 // `expected`, which has just been poisoned.
1012 } else {
1013 arm64_codegen->GetAssembler()->PoisonHeapReference(value);
1014 }
1015 }
1016
1017 // do {
1018 // tmp_value = [tmp_ptr] - expected;
1019 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1020
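    // Note on the exclusive pair below: Stxr writes a status result into
    // `tmp_value` (0 on success, 1 if the exclusive monitor was lost), so
    // Cbnz retries the store on contention, and Clrex clears the monitor on
    // the failed-comparison path so no stale exclusive state is left behind.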
    vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&comparison_failed, ne);
    __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_value, &loop_head);
    __ B(&exit_loop);
    __ Bind(&comparison_failed);
    __ Clrex();
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not unpoison `value`, as it is the same register as
        // `expected`, which has just been unpoisoned.
      } else {
        arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
      }
    }

    __ B(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The offset, index and scale factor used to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  size_t scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // Should this reference load use Load-Acquire semantics?
  bool use_load_acquire_;
  // A temporary register used to hold the lock word of `obj_`, and
  // also the original reference value when the reference is marked.
  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0u);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<size_t>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<size_t>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow entry point is not a
    // managed reference (`mirror::Object*`), but a
    // `GcRoot<mirror::Object>*`; thus we need a 64-bit move here, and we
    // cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`DataType::Type::kReference`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
  return next_location;
}
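// For illustration: given a managed method taking (long, float, Object), the
// visitor above hands out the next free core register for the long, the next
// free FP register for the float, and another core register for the
// reference, while `stack_index_` advances by two slots for the long and one
// slot per 32-bit argument; stack space is thus reserved for every argument,
// even those passed in registers.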

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
                                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();

  // Emit JIT baker read barrier slow paths.
  DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
  for (auto& entry : jit_baker_read_barrier_slow_paths_) {
    uint32_t encoded_data = entry.first;
    vixl::aarch64::Label* slow_path_entry = &entry.second.label;
    __ Bind(slow_path_entry);
    CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name */ nullptr);
  }

  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);

  // Verify Baker read barrier linker patches.
  if (kIsDebugBuild) {
    ArrayRef<const uint8_t> code = allocator->GetMemory();
    for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
      DCHECK(info.label.IsBound());
      uint32_t literal_offset = info.label.GetLocation();
      DCHECK_ALIGNED(literal_offset, 4u);

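      // A64 instructions are fixed-width 32 bits and stored little-endian,
      // hence the byte-by-byte reassembly below.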
      auto GetInsn = [&code](uint32_t offset) {
        DCHECK_ALIGNED(offset, 4u);
        return
            (static_cast<uint32_t>(code[offset + 0]) << 0) +
            (static_cast<uint32_t>(code[offset + 1]) << 8) +
            (static_cast<uint32_t>(code[offset + 2]) << 16) +
            (static_cast<uint32_t>(code[offset + 3]) << 24);
      };

      const uint32_t encoded_data = info.custom_data;
      BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
      // Check that the next instruction matches the expected LDR.
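      // In the CHECK_EQ masks below, the high bits pin down the LDR opcode
      // while the low field selects the register slot being verified (the Rn
      // base at bits 5..9, or the Rt destination at bits 0..4 in the GC root
      // case); the immediate offset bits are deliberately left unchecked.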
      switch (kind) {
        case BakerReadBarrierKind::kField: {
          DCHECK_GE(code.size() - literal_offset, 8u);
          uint32_t next_insn = GetInsn(literal_offset + 4u);
          // LDR (immediate) with correct base_reg.
          CheckValidReg(next_insn & 0x1fu);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
          break;
        }
        case BakerReadBarrierKind::kArray: {
          DCHECK_GE(code.size() - literal_offset, 8u);
          uint32_t next_insn = GetInsn(literal_offset + 4u);
          // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
          // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
          CheckValidReg(next_insn & 0x1fu);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
          CheckValidReg((next_insn >> 16) & 0x1f);  // Check index register.
          break;
        }
        case BakerReadBarrierKind::kGcRoot: {
          DCHECK_GE(literal_offset, 4u);
          uint32_t prev_insn = GetInsn(literal_offset - 4u);
          // LDR (immediate) with correct root_reg.
          const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg);
          break;
        }
        default:
          LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
          UNREACHABLE();
      }
    }
  }
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
  // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
  // cycles on ARM64, so we always have 1 GPR and 1 FPR available VIXL temps to resolve the
  // dependency.
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
         || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
         || kind == Location::kSIMDStackSlot);
  kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
      ? Location::kFpuRegister
      : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK_EQ(kind, Location::kFpuRegister);
    scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
        ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
        : vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Bind(&frame_entry_label_);

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    __ Ldrh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
    __ Add(temp, temp, 1);
    __ Strh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
    {
      // Ensure that between load and RecordPcInfo there are no pools emitted.
      ExactAssemblyScope eas(GetVIXLAssembler(),
                             kInstructionSize,
                             CodeBufferCheckScope::kExactSize);
      __ ldr(wzr, MemOperand(temp, 0));
      RecordPcInfo(nullptr, 0);
    }
  }
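  // The wzr load above is the implicit stack overflow check: probing
  // `sp - reserved_bytes` faults when the thread is out of stack, and the
  // runtime's fault handler turns a fault at this recorded PC into a
  // StackOverflowError.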

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8] : lr.
    //   ...                : other preserved core registers.
    //   ...                : other preserved fp registers.
    //   ...                : reserved frame space.
    //   sp[0]              : current method.

    // Save the current method if we need it. Note that we do not
    // do this in HCurrentMethod, as the instruction might have been removed
    // in the SSA graph.
    if (RequiresCurrentMethod()) {
      __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    } else {
      __ Claim(frame_size);
    }
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      // Initialize should_deoptimize flag to 0.
      Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
      __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
    }
  }

  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void CodeGeneratorARM64::GenerateFrameExit() {
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return CPURegList(CPURegister::kRegister, kXRegSize, core_spill_mask_);
}

CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return CPURegList(CPURegister::kFPRegister, kDRegSize, fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::aarch64::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp.X()));
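  // `card` serves double duty above: it is both the card table base used to
  // address the card and the value stored, as the card table is mapped so
  // that the low byte of its base address equals the dirty-card value. The
  // Strb therefore marks the card without materializing a separate constant.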
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //   lr  : Runtime reserved.
  //   tr  : Runtime reserved.
  //   mr  : Runtime reserved.
  //   ip1 : VIXL core temp.
  //   ip0 : VIXL core temp.
  //
  // Blocked fp registers:
  //   d31 : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}

static bool CoherentConstantAndType(Location constant, DataType::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
         (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
         (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
         (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
}

// Allocate a scratch register from the VIXL pool, querying first
// the floating-point register pool, and then the core register
// pool. This is essentially a reimplementation of
// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
// using a different allocation strategy.
static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
                                                    vixl::aarch64::UseScratchRegisterScope* temps,
                                                    int size_in_bits) {
  return masm->GetScratchFPRegisterList()->IsEmpty()
      ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
      : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
}

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      DataType::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == DataType::Type::kVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
      }
    }
    DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsSIMDStackSlot()) {
      __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        DataType::Type source_type = DataType::Is64BitType(dst_type)
            ? DataType::Type::kInt64
            : DataType::Type::kInt32;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        DataType::Type source_type = DataType::Is64BitType(dst_type)
            ? DataType::Type::kFloat64
            : DataType::Type::kFloat32;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        if (GetGraph()->HasSIMD()) {
          __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
        } else {
          __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ Str(QRegisterFrom(source), StackOperandFrom(destination));
    } else {
      DCHECK(source.IsSIMDStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      if (GetVIXLAssembler()->GetScratchFPRegisterList()->IsEmpty()) {
        Register temp = temps.AcquireX();
        __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
        __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
        __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
        __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
      } else {
        FPRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
        __ Ldr(temp, StackOperandFrom(source));
        __ Str(temp, StackOperandFrom(destination));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
        } else {
          dst_type =
              destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsZeroBitPattern()) {
        temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
            ? Register(xzr)
            : Register(wzr);
      } else {
        if (src_cst->IsIntConstant()) {
          temp = temps.AcquireW();
        } else if (src_cst->IsLongConstant()) {
          temp = temps.AcquireX();
        } else if (src_cst->IsFloatConstant()) {
          temp = temps.AcquireS();
        } else {
          DCHECK(src_cst->IsDoubleConstant());
          temp = temps.AcquireD();
        }
        MoveConstant(temp, src_cst);
      }
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // Use any scratch register (a core or a floating-point one)
      // from VIXL scratch register pools as a temporary.
      //
      // We used to only use the FP scratch register pool, but in some
      // rare cases the only register from this pool (D31) would
      // already be used (e.g. within a ParallelMove instruction, when
      // a move is blocked by another move requiring a scratch FP
      // register, which would reserve D31). To prevent this issue, we
      // ask for a scratch register of any type (core or FP).
      //
      // Also, we start by asking for an FP scratch register first, as the
      // demand for scratch core registers is higher. This is why we
      // use AcquireFPOrCoreCPURegisterOfSize instead of
      // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
      // allocates core scratch registers first.
      CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
          GetVIXLAssembler(),
          &temps,
          (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(DataType::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      __ Ldrb(Register(dst), src);
      break;
    case DataType::Type::kInt8:
      __ Ldrsb(Register(dst), src);
      break;
    case DataType::Type::kUint16:
      __ Ldrh(Register(dst), src);
      break;
    case DataType::Type::kInt16:
      __ Ldrsh(Register(dst), src);
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
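// Note: the narrow cases above pick the extension to match the type --
// Ldrb/Ldrh zero-extend for kBool/kUint8/kUint16 while Ldrsb/Ldrsh
// sign-extend for kInt8/kInt16 -- so `dst` always holds a correctly
// extended 32-bit value.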

void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  DataType::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    MemOperand base = MemOperand(temp_base);
    switch (type) {
      case DataType::Type::kBool:
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldarb(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
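        // ldarb zero-extends, so the signed kInt8 case must be
        // re-sign-extended with Sbfx below; the kInt16 case after ldarh is
        // handled the same way.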
        if (type == DataType::Type::kInt8) {
          __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
        }
        break;
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldarh(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        if (type == DataType::Type::kInt16) {
          __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kReference:
      case DataType::Type::kInt64:
        DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldar(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64: {
        DCHECK(dst.IsFPRegister());
        DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));

        Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldar(temp, base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        __ Fmov(FPRegister(dst), temp);
        break;
      }
      case DataType::Type::kUint32:
      case DataType::Type::kUint64:
      case DataType::Type::kVoid:
        LOG(FATAL) << "Unreachable type " << type;
    }
  }
}
2010
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002011void CodeGeneratorARM64::Store(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002012 CPURegister src,
2013 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002014 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002015 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002016 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002017 case DataType::Type::kInt8:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002018 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002019 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002020 case DataType::Type::kUint16:
2021 case DataType::Type::kInt16:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002022 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002023 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002024 case DataType::Type::kInt32:
2025 case DataType::Type::kReference:
2026 case DataType::Type::kInt64:
2027 case DataType::Type::kFloat32:
2028 case DataType::Type::kFloat64:
2029 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002030 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00002031 break;
Aart Bik66c158e2018-01-31 12:55:04 -08002032 case DataType::Type::kUint32:
2033 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002034 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002035 LOG(FATAL) << "Unreachable type " << type;
2036 }
2037}
2038
Artem Serov914d7a82017-02-07 14:33:49 +00002039void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002040 DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002041 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00002042 const MemOperand& dst,
2043 bool needs_null_check) {
2044 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002045 UseScratchRegisterScope temps(GetVIXLAssembler());
2046 Register temp_base = temps.AcquireX();
2047
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002048 DCHECK(!dst.IsPreIndex());
2049 DCHECK(!dst.IsPostIndex());
2050
2051 // TODO(vixl): Let the MacroAssembler handle this.
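  // As with LoadAcquire above, the STLR-family instructions only take a plain base
  // register, so compute the absolute address up front.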
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.GetBaseRegister(), op);
  MemOperand base = MemOperand(temp_base);
  // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrb(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrh(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      Register temp_src;
      if (src.IsZero()) {
        // The zero register is used to avoid synthesizing zero constants.
        temp_src = Register(src);
      } else {
        DCHECK(src.IsFPRegister());
        temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        __ Fmov(temp_src, FPRegister(src));
      }
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(temp_src, base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    }
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);

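  // The entrypoint is loaded out of the current thread's entrypoint table; `tr` is the
  // register that holds the Thread* on ARM64.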
  __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
  {
    // Ensure the pc position is recorded immediately after the `blr` instruction.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
    __ blr(lr);
    if (EntrypointRequiresStackMap(entrypoint)) {
      RecordPcInfo(instruction, dex_pc, slow_path);
    }
  }
}

void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                             HInstruction* instruction,
                                                             SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_byte_offset);
  __ Ldarb(temp, HeapOperand(temp));
Vladimir Marko2c64a832018-01-04 11:31:56 +00002158 __ B(lo, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00002159 __ Bind(slow_path->GetExitLabel());
2160}
Alexandre Rames5319def2014-10-23 10:03:10 +01002161
Vladimir Marko175e7862018-03-27 09:03:13 +00002162void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
2163 HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
2164 uint32_t path_to_root = check->GetBitstringPathToRoot();
2165 uint32_t mask = check->GetBitstringMask();
2166 DCHECK(IsPowerOfTwo(mask + 1));
2167 size_t mask_bits = WhichPowerOf2(mask + 1);
2168
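  // On entry `temp` holds the class to check. Only the comparison is emitted here; the
  // result is left in the condition flags and the caller branches on eq/ne.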
  if (mask_bits == 16u) {
    // Load only the bitstring part of the status word.
    __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
    // Extract the bitstring bits.
    __ Ubfx(temp, temp, 0, mask_bits);
  }
  // Compare the bitstring bits to `path_to_root`.
  __ Cmp(temp, path_to_root);
}

void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

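  // Map the HIR barrier kind onto a DMB option; kAnyAny and kAnyStore are handled
  // conservatively with a full barrier.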
  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

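  // Load the 16-bit thread flags; any set flag (e.g. a pending suspend request) makes
  // the value non-zero and diverts execution to the slow path.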
  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
                                           const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
    if (!field_info.IsVolatile()) {
      // We need a temporary register for the read barrier load in
      // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
      // only if the offset is too big.
      if (field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
        locations->AddTemp(FixedTempLocation());
      }
    } else {
      // Volatile fields need a temporary register for the read barrier marking slow
      // path in CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier().
      locations->AddTemp(Location::RequiresRegister());
    }
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
      load_type == DataType::Type::kReference) {
    // Object FieldGet with Baker's read barrier case.
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, DataType::Type::kReference);
    Location maybe_temp =
        (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        maybe_temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Load(load_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (load_type == DataType::Type::kReference) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
  } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  DataType::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(
          instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
    } else {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure shift distance is in the same size register as the result. If
          // we are rotating a long and the shift comes in a w register originally,
          // we don't need to sxtw for use as an x since the shift distances are
          // all & reg_bits - 1.
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else if (instr->IsMin() || instr->IsMax()) {
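        // Min/Max lower to a compare followed by a conditional select of the two inputs.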
        __ Cmp(lhs, rhs);
        __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else if (instr->IsMin()) {
        __ Fmin(dst, lhs, rhs);
      } else if (instr->IsMax()) {
        __ Fmax(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  DataType::Type type = instr->GetType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
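        // Mask the immediate to the register width, matching Java shift semantics
        // (distances are taken modulo 32 or 64).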
        uint32_t shift_value = rhs.GetImmediate() &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}

void LocationsBuilderARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
         instruction->GetType() == DataType::Type::kInt64);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  DataType::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  Operand right_operand(0);

  HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg,
                            helpers::ShiftFromOpKind(op_kind),
                            instruction->GetShiftAmount());
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it was passed by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);

  HIntConstant* shift = instruction->GetShift()->AsIntConstant();

  locations->SetInAt(0, Location::RequiresRegister());
  // For byte case we don't need to shift the index variable so we can encode the data offset into
  // ADD instruction. For other cases we prefer the data_offset to be in register; that will hoist
  // data offset constant generation out of the loop and reduce the critical path length in the
  // loop.
  locations->SetInAt(1, shift->GetValue() == 0
                        ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
                        : Location::RequiresRegister());
  locations->SetInAt(2, Location::ConstantLocation(shift));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instruction) {
  Register index_reg = InputRegisterAt(instruction, 0);
  uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
  uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();

  if (shift == 0) {
    __ Add(OutputRegister(instruction), index_reg, offset);
  } else {
    Register offset_reg = InputRegisterAt(instruction, 1);
    __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
  }
}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == DataType::Type::kInt64 &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::aarch64::Instruction* prev =
        masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
    if (instruction->GetIndex()->IsConstant()) {
      // Array loads with constant index are treated as field loads.
      // We need a temporary register for the read barrier load in
      // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
      // only if the offset is too big.
      uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
      uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
      offset += index << DataType::SizeShift(DataType::Type::kReference);
      if (offset >= kReferenceLoadMinFarOffset) {
        locations->AddTemp(FixedTempLocation());
      }
    } else {
      // We need a non-scratch temporary for the array data pointer in
      // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier().
      locations->AddTemp(Location::RequiresRegister());
    }
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // The read barrier instrumentation of object ArrayGet instructions
  // does not support the HIntermediateAddress instruction.
  DCHECK(!((type == DataType::Type::kReference) &&
           instruction->GetArray()->IsIntermediateAddress() &&
           kEmitCompilerReadBarrier));

  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
    if (index.IsConstant()) {
      // Array load with a constant index can be treated as a field load.
      offset += Int64FromLocation(index) << DataType::SizeShift(type);
      Location maybe_temp =
          (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj.W(),
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      Register temp = WRegisterFrom(locations->GetTemp(0));
      codegen_->GenerateArrayLoadWithBakerReadBarrier(
          out, obj.W(), offset, index, temp, /* needs_null_check */ false);
    }
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    Register length;
    if (maybe_compressed_char_at) {
      uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
      length = temps.AcquireW();
      {
        // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);

        if (instruction->GetArray()->IsIntermediateAddress()) {
          DCHECK_LT(count_offset, offset);
          int64_t adjusted_offset =
              static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
          // Note that `adjusted_offset` is negative, so this will be a LDUR.
          __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
        } else {
          __ Ldr(length, HeapOperand(obj, count_offset));
        }
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }
    if (index.IsConstant()) {
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
2850 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002851 __ Ldrb(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002852 HeapOperand(obj, offset + Int64FromLocation(index)));
jessicahandojo05765752016-09-09 19:01:32 -07002853 __ B(&done);
2854 __ Bind(&uncompressed_load);
2855 __ Ldrh(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002856 HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
jessicahandojo05765752016-09-09 19:01:32 -07002857 __ Bind(&done);
2858 } else {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002859 offset += Int64FromLocation(index) << DataType::SizeShift(type);
jessicahandojo05765752016-09-09 19:01:32 -07002860 source = HeapOperand(obj, offset);
2861 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002862 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002863 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002864 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002865 // We do not need to compute the intermediate address from the array: the
2866 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002867 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002868 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002869 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002870 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2871 }
2872 temp = obj;
2873 } else {
2874 __ Add(temp, obj, offset);
2875 }
jessicahandojo05765752016-09-09 19:01:32 -07002876 if (maybe_compressed_char_at) {
2877 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002878 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2879 "Expecting 0=compressed, 1=uncompressed");
2880 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002881 __ Ldrb(Register(OutputCPURegister(instruction)),
2882 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2883 __ B(&done);
2884 __ Bind(&uncompressed_load);
2885 __ Ldrh(Register(OutputCPURegister(instruction)),
2886 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2887 __ Bind(&done);
2888 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002889 source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
jessicahandojo05765752016-09-09 19:01:32 -07002890 }
Roland Levillain44015862016-01-22 11:47:17 +00002891 }
jessicahandojo05765752016-09-09 19:01:32 -07002892 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002893 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2894 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002895 codegen_->Load(type, OutputCPURegister(instruction), source);
2896 codegen_->MaybeRecordImplicitNullCheck(instruction);
2897 }
Roland Levillain44015862016-01-22 11:47:17 +00002898
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002899 if (type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002900 static_assert(
2901 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2902 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2903 Location obj_loc = locations->InAt(0);
2904 if (index.IsConstant()) {
2905 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2906 } else {
2907 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2908 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002909 }
Roland Levillain4d027112015-07-01 15:41:14 +01002910 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002911}
2912
void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  vixl::aarch64::Register out = OutputRegister(instruction);
  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Lsr(out.W(), out.W(), 1u);
  }
}

void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
  } else if (DataType::IsFloatingPointType(value_type)) {
    locations->SetInAt(2, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = instruction->GetLocations();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  Register array = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
  CPURegister source = value;
  Location index = locations->InAt(1);
  size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
  MemOperand destination = HeapOperand(array);
  MacroAssembler* masm = GetVIXLAssembler();

  if (!needs_write_barrier) {
    DCHECK(!may_need_runtime_call_for_type_check);
    if (index.IsConstant()) {
      offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
      destination = HeapOperand(array, offset);
    } else {
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
        }
        temp = array;
      } else {
        __ Add(temp, array, offset);
      }
      destination = HeapOperand(temp,
                                XRegisterFrom(index),
                                LSL,
                                DataType::SizeShift(value_type));
    }
    {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Store(value_type, value, destination);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  } else {
    DCHECK(!instruction->GetArray()->IsIntermediateAddress());
    vixl::aarch64::Label done;
    SlowPathCodeARM64* slow_path = nullptr;
    {
      // We use a block to end the scratch scope before the write barrier, thus
      // freeing the temporary registers so they can be used in `MarkGCCard`.
      UseScratchRegisterScope temps(masm);
      Register temp = temps.AcquireSameSizeAs(array);
      if (index.IsConstant()) {
        offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
        destination = HeapOperand(array, offset);
      } else {
        destination = HeapOperand(temp,
                                  XRegisterFrom(index),
                                  LSL,
                                  DataType::SizeShift(value_type));
      }

      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          vixl::aarch64::Label non_zero;
          __ Cbnz(Register(value), &non_zero);
          if (!index.IsConstant()) {
            __ Add(temp, array, offset);
          }
          {
            // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
            // emitted.
            EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
            __ Str(wzr, destination);
            codegen_->MaybeRecordImplicitNullCheck(instruction);
          }
          __ B(&done);
          __ Bind(&non_zero);
        }

        // Note that when Baker read barriers are enabled, the type
        // checks are performed without read barriers. This is fine,
        // even in the case where a class object is in the from-space
        // after the flip, as a comparison involving such a type would
        // not produce a false positive; it may of course produce a
        // false negative, in which case we would take the ArraySet
        // slow path.

        Register temp2 = temps.AcquireSameSizeAs(array);
        // /* HeapReference<Class> */ temp = array->klass_
        {
          // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
          EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
          __ Ldr(temp, HeapOperand(array, class_offset));
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
        GetAssembler()->MaybeUnpoisonHeapReference(temp);

        // /* HeapReference<Class> */ temp = temp->component_type_
        __ Ldr(temp, HeapOperand(temp, component_offset));
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ Ldr(temp2, HeapOperand(Register(value), class_offset));
        // If heap poisoning is enabled, no need to unpoison `temp`
        // nor `temp2`, as we are comparing two poisoned references.
        __ Cmp(temp, temp2);
        temps.Release(temp2);

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          vixl::aarch64::Label do_put;
          __ B(eq, &do_put);
          // If heap poisoning is enabled, the `temp` reference has
          // not been unpoisoned yet; unpoison it now.
          GetAssembler()->MaybeUnpoisonHeapReference(temp);

          // /* HeapReference<Class> */ temp = temp->super_class_
          __ Ldr(temp, HeapOperand(temp, super_offset));
          // If heap poisoning is enabled, no need to unpoison
          // `temp`, as we are comparing against null below.
          __ Cbnz(temp, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ B(ne, slow_path->GetEntryLabel());
        }
      }

      if (kPoisonHeapReferences) {
        Register temp2 = temps.AcquireSameSizeAs(array);
        DCHECK(value.IsW());
        __ Mov(temp2, value.W());
        GetAssembler()->PoisonHeapReference(temp2);
        source = temp2;
      }

      if (!index.IsConstant()) {
        __ Add(temp, array, offset);
      } else {
        // We no longer need the `temp` here so release it as the store below may
        // need a scratch register (if the constant index makes the offset too large)
        // and the poisoned `source` could be using the other scratch register.
        temps.Release(temp);
      }
      {
        // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        __ Str(source, destination);

        if (!may_need_runtime_call_for_type_check) {
          codegen_->MaybeRecordImplicitNullCheck(instruction);
        }
      }
    }

    codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());

    if (done.IsLinked()) {
      __ Bind(&done);
    }

    if (slow_path != nullptr) {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

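// For reference, the MarkGCCard call above emits the card-marking write
// barrier. A rough sketch of the sequence it produces (see its definition
// earlier in this file; register names are illustrative):
//     cbz  w_value, done                 // skip when the stored ref is null
//     ldr  x_card, [tr, #card_table_offset]
//     lsr  x_temp, x_array, #kCardShift  // card index covering `array`
//     strb w_card, [x_card, x_temp]      // mark the card dirty
//   done:
// The byte stored is the low byte of the biased card-table base, which by
// construction equals the dirty-card value, so no extra register is needed
// to materialize the byte being written.
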
void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  RegisterSet caller_saves = RegisterSet::Empty();
  InvokeRuntimeCallingConvention calling_convention;
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  BoundsCheckSlowPathARM64* slow_path =
      new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}

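// The single unsigned comparison above covers both failure modes at once:
// B.hs (unsigned >=) is taken when index >= length, and also when the index
// is negative, because e.g. -1 compares as 0xFFFFFFFF unsigned. One Cmp plus
// one branch thus replaces the two signed checks `index < 0` and
// `index >= length`.
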
void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}

void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM64* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
}

static bool IsFloatingPointZeroConstant(HInstruction* inst) {
  return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
      || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
}

void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
  FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
  Location rhs_loc = instruction->GetLocations()->InAt(1);
  if (rhs_loc.IsConstant()) {
    // 0.0 is the only immediate that can be encoded directly in
    // an FCMP instruction.
    //
    // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
    // specify that in a floating-point comparison, positive zero
    // and negative zero are considered equal, so we can use the
    // literal 0.0 for both cases here.
    //
    // Note however that some methods (Float.equals, Float.compare,
    // Float.compareTo, Double.equals, Double.compare,
    // Double.compareTo, Math.max, Math.min, StrictMath.max,
    // StrictMath.min) consider 0.0 to be (strictly) greater than
    // -0.0. So if we ever translate calls to these methods into a
    // HCompare instruction, we must handle the -0.0 case with
    // care here.
    DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
    __ Fcmp(lhs_reg, 0.0);
  } else {
    __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
  }
}

void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
  DataType::Type in_type = compare->InputAt(0)->GetType();
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1,
                         IsFloatingPointZeroConstant(compare->InputAt(1))
                             ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
                             : Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
  DataType::Type in_type = compare->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register result = OutputRegister(compare);
      Register left = InputRegisterAt(compare, 0);
      Operand right = InputOperandAt(compare, 1);
      __ Cmp(left, right);
      __ Cset(result, ne);          // result == +1 if NE or 0 otherwise
      __ Cneg(result, result, lt);  // result == -1 if LT or unchanged otherwise
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      Register result = OutputRegister(compare);
      GenerateFcmp(compare);
      __ Cset(result, ne);
      __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}

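// A worked example of the integer Cset/Cneg idiom above, assuming
// compare(3, 7): Cmp sets the flags for 3 - 7, so `ne` and `lt` both hold;
// Cset(result, ne) yields 1 and Cneg(result, result, lt) flips it to -1.
// For compare(7, 3), `ne` holds but `lt` does not, so the 1 is kept. For
// equal inputs Cset yields 0 and Cneg leaves it unchanged, producing the
// -1/0/+1 contract without any branches.
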
void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);

  if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1,
                       IsFloatingPointZeroConstant(instruction->InputAt(1))
                           ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
                           : Location::RequiresFpuRegister());
  } else {
    // Integer cases.
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
  }

  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  IfCondition if_cond = instruction->GetCondition();

  if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
    GenerateFcmp(instruction);
    __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
  } else {
    // Integer cases.
    Register lhs = InputRegisterAt(instruction, 0);
    Operand rhs = InputOperandAt(instruction, 1);
    __ Cmp(lhs, rhs);
    __ Cset(res, ARM64Condition(if_cond));
  }
}

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)                   \
  M(Below)                                \
  M(BelowOrEqual)                         \
  M(Above)                                \
  M(AboveOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                                   \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }         \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef DEFINE_CONDITION_VISITORS
#undef FOR_EACH_CONDITION_INSTRUCTION

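// For illustration, DEFINE_CONDITION_VISITORS(Equal) expands to the pair
//   void LocationsBuilderARM64::VisitEqual(HEqual* comp) { HandleCondition(comp); }
//   void InstructionCodeGeneratorARM64::VisitEqual(HEqual* comp) { HandleCondition(comp); }
// and FOR_EACH_CONDITION_INSTRUCTION stamps that pair out for all ten
// condition instructions listed above.
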
void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);

  if (abs_imm == 2) {
    int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
    __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
  } else {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireSameSizeAs(out);
    __ Add(temp, dividend, abs_imm - 1);
    __ Cmp(dividend, 0);
    __ Csel(out, temp, dividend, lt);
  }

  int ctz_imm = CTZ(abs_imm);
  if (imm > 0) {
    __ Asr(out, out, ctz_imm);
  } else {
    __ Neg(out, Operand(out, ASR, ctz_imm));
  }
}

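// A worked example of the power-of-two path above, assuming an int32
// division by imm = 8 (so abs_imm - 1 = 7 and ctz_imm = 3), with
// dividend = -5:
//     temp = -5 + 7 = 2                 // Add
//     out  = (dividend < 0) ? 2 : -5    // Cmp + Csel picks 2
//     out  = 2 >> 3 = 0                 // Asr
// which matches Java semantics (-5 / 8 == 0), where a bare arithmetic shift
// would give -1. For abs_imm == 2 the rounding adjustment is just the sign
// bit, hence the single Add with LSR above; for negative divisors the final
// Neg folds the negation into the shift.
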
void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(
      imm, type == DataType::Type::kInt64 /* is_long */, &magic, &shift);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireSameSizeAs(out);

  // temp = get_high(dividend * magic)
  __ Mov(temp, magic);
  if (type == DataType::Type::kInt64) {
    __ Smulh(temp, dividend, temp);
  } else {
    __ Smull(temp.X(), dividend, temp);
    __ Lsr(temp.X(), temp.X(), 32);
  }

  if (imm > 0 && magic < 0) {
    __ Add(temp, temp, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Sub(temp, temp, dividend);
  }

  if (shift != 0) {
    __ Asr(temp, temp, shift);
  }

  if (instruction->IsDiv()) {
    __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
  } else {
    __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
    // TODO: Strength reduction for msub.
    Register temp_imm = temps.AcquireSameSizeAs(out);
    __ Mov(temp_imm, imm);
    __ Msub(out, temp, temp_imm, dividend);
  }
}

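// A worked example of the Hacker's Delight style sequence above, assuming an
// int32 division by imm = 7, for which CalculateMagicAndShiftForDivRem is
// expected to yield magic = 0x92492493 (negative as int32) and shift = 2.
// For dividend = 20:
//     temp = high32(20 * magic) = -9    // Smull + Lsr #32
//     temp = -9 + 20 = 11               // magic < 0 and imm > 0, so Add
//     temp = 11 >> 2 = 2                // Asr by shift
//     out  = 2 - (2 >> 31) = 2          // add 1 only for negative quotients
// i.e. 20 / 7 == 2 with no division instruction. For HRem, the quotient is
// multiplied back and subtracted from the dividend with Msub to produce the
// remainder.
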
void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));

  if (imm == 0) {
    // Do not generate anything. DivZeroCheck would prevent any code from being executed.
    return;
  }

  if (IsPowerOfTwo(AbsOrMin(imm))) {
    GenerateIntDivForPower2Denom(instruction);
  } else {
    // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
    DCHECK(imm < -2 || imm > 2) << imm;
    GenerateDivRemWithAnyConstant(instruction);
  }
}

void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv* instruction) {
  DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
      << instruction->GetResultType();

  if (instruction->GetLocations()->InAt(1).IsConstant()) {
    GenerateIntDivForConstDenom(instruction);
  } else {
    Register out = OutputRegister(instruction);
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    __ Sdiv(out, dividend, divisor);
  }
}

void LocationsBuilderARM64::VisitDiv(HDiv* div) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
  DataType::Type type = div->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      GenerateIntDiv(div);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}

void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}

void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM64* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  DataType::Type type = instruction->GetType();

  if (!DataType::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = Int64FromLocation(value);
    if (divisor == 0) {
      __ B(slow_path->GetEntryLabel());
    } else {
      // A division by a non-zero constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitDoubleConstant(
    HDoubleConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}

void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp1 = temps.AcquireX();
      Register temp2 = temps.AcquireX();
      __ Ldr(temp1, MemOperand(sp, 0));
      __ Ldrh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
      __ Add(temp2, temp2, 1);
      __ Strh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
    }
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

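// The hotness block above reloads the ArtMethod* from the bottom of the
// frame ([sp, #0]), increments its 16-bit hotness counter, and stores it
// back on every loop back edge, so the runtime's hotness heuristics can see
// loops spinning inside compiled code. The suspend check emitted right after
// it is what turns back edges into safepoints.
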
void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}

void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}

void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
                                                          size_t condition_input_index,
                                                          vixl::aarch64::Label* true_target,
                                                          vixl::aarch64::Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //       - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //       - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //       - condition true => branch to true_target
  //       - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
    } else {
      __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();

    DataType::Type type = condition->InputAt(0)->GetType();
    if (DataType::IsFloatingPointType(type)) {
      GenerateFcmp(condition);
      if (true_target == nullptr) {
        IfCondition opposite_condition = condition->GetOppositeCondition();
        __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
      } else {
        __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
      }
    } else {
      // Integer cases.
      Register lhs = InputRegisterAt(condition, 0);
      Operand rhs = InputOperandAt(condition, 1);

      Condition arm64_cond;
      vixl::aarch64::Label* non_fallthrough_target;
      if (true_target == nullptr) {
        arm64_cond = ARM64Condition(condition->GetOppositeCondition());
        non_fallthrough_target = false_target;
      } else {
        arm64_cond = ARM64Condition(condition->GetCondition());
        non_fallthrough_target = true_target;
      }

      if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
          rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
        switch (arm64_cond) {
          case eq:
            __ Cbz(lhs, non_fallthrough_target);
            break;
          case ne:
            __ Cbnz(lhs, non_fallthrough_target);
            break;
          case lt:
            // Test the sign bit and branch accordingly.
            __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          case ge:
            // Test the sign bit and branch accordingly.
            __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
            break;
          default:
            // Without the `static_cast` the compiler throws an error for
            // `-Werror=sign-promo`.
            LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
        }
      } else {
        __ Cmp(lhs, rhs);
        __ B(arm64_cond, non_fallthrough_target);
      }
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (as in case 2), and we still need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}

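// For example, a non-materialized `if (x < 0)` reaches the integer path
// above with arm64_cond = lt and rhs = #0, and is emitted as a single test
// of the sign bit:
//     tbnz w_x, #31, <target>    // branch if bit 31 (the sign bit) is set
// instead of a cmp/b.lt pair; `x >= 0` uses tbz on the same bit, and
// comparisons against zero with eq/ne become cbz/cbnz.
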
void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
    true_target = nullptr;
  }
  vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
  if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
    false_target = nullptr;
  }
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeARM64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ Ldr(OutputRegister(flag),
         MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
}

static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
  return condition->IsCondition() &&
         DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
}

static inline Condition GetConditionForSelect(HCondition* condition) {
  IfCondition cond = condition->AsCondition()->GetCondition();
  return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
                                                     : ARM64Condition(cond);
}

void LocationsBuilderARM64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
  if (DataType::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
    HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
    bool is_true_value_constant = cst_true_value != nullptr;
    bool is_false_value_constant = cst_false_value != nullptr;
    // Ask VIXL whether we should synthesize constants in registers.
    // We give an arbitrary register to VIXL when dealing with non-constant inputs.
    Operand true_op = is_true_value_constant ?
        Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
    Operand false_op = is_false_value_constant ?
        Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
    bool true_value_in_register = false;
    bool false_value_in_register = false;
    MacroAssembler::GetCselSynthesisInformation(
        x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
    true_value_in_register |= !is_true_value_constant;
    false_value_in_register |= !is_false_value_constant;

    locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
                                                 : Location::ConstantLocation(cst_true_value));
    locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
                                                  : Location::ConstantLocation(cst_false_value));
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }

  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
  HInstruction* cond = select->GetCondition();
  Condition csel_cond;

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    if (cond->IsCondition() && cond->GetNext() == select) {
      // Use the condition flags set by the previous instruction.
      csel_cond = GetConditionForSelect(cond->AsCondition());
    } else {
      __ Cmp(InputRegisterAt(select, 2), 0);
      csel_cond = ne;
    }
  } else if (IsConditionOnFloatingPointValues(cond)) {
    GenerateFcmp(cond);
    csel_cond = GetConditionForSelect(cond->AsCondition());
  } else {
    __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
    csel_cond = GetConditionForSelect(cond->AsCondition());
  }

  if (DataType::IsFloatingPointType(select->GetType())) {
    __ Fcsel(OutputFPRegister(select),
             InputFPRegisterAt(select, 1),
             InputFPRegisterAt(select, 0),
             csel_cond);
  } else {
    __ Csel(OutputRegister(select),
            InputOperandAt(select, 1),
            InputOperandAt(select, 0),
            csel_cond);
  }
}

David Srbecky0cf44932015-12-09 14:09:59 +00003821void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003822 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00003823}
3824
David Srbeckyd28f4a02016-03-14 17:14:24 +00003825void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3826 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003827}
3828
3829void CodeGeneratorARM64::GenerateNop() {
3830 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003831}
3832
Alexandre Rames5319def2014-10-23 10:03:10 +01003833void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00003834 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003835}
3836
3837void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003838 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003839}
3840
3841void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003842 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003843}
3844
3845void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003846 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003847}
3848
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003849// Temp is used for read barrier.
3850static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3851 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003852 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003853 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3854 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3855 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3856 return 1;
3857 }
3858 return 0;
3859}
3860
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003861// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003862// interface pointer, one for loading the current interface.
3863// The other checks have one temp for loading the object's class.
3864static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3865 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3866 return 3;
3867 }
3868 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003869}
3870
Alexandre Rames67555f72014-11-18 10:55:16 +00003871void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003872 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003873 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003874 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003875 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003876 case TypeCheckKind::kExactCheck:
3877 case TypeCheckKind::kAbstractClassCheck:
3878 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00003879 case TypeCheckKind::kArrayObjectCheck: {
3880 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
3881 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
3882 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003883 break;
Vladimir Marko87584542017-12-12 17:47:52 +00003884 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003885 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003886 case TypeCheckKind::kUnresolvedCheck:
3887 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003888 call_kind = LocationSummary::kCallOnSlowPath;
3889 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00003890 case TypeCheckKind::kBitstringCheck:
3891 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003892 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003893
Vladimir Markoca6fff82017-10-03 14:49:14 +01003894 LocationSummary* locations =
3895 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003896 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003897 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003898 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003899 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003900 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3901 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3902 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3903 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3904 } else {
3905 locations->SetInAt(1, Location::RequiresRegister());
3906 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003907 // The "out" register is used as a temporary, so it overlaps with the inputs.
3908 // Note that TypeCheckSlowPathARM64 uses this register too.
3909 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003910 // Add temps if necessary for read barriers.
3911 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003912}
3913
void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? Register()
      : InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  Register out = OutputRegister(instruction);
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::aarch64::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      __ Cmp(out, cls);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
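    // Roughly, the exact check above emits (read barriers and the null check
    // aside):
    //   ldr  w_out, [x_obj, #class_offset]  // out = obj->klass_
    //   cmp  w_out, w_cls
    //   cset w_out, eq                      // out = (out == cls) ? 1 : 0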

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
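    // Conceptually, the loop above computes (sketch):
    //   k = obj->klass_;
    //   do {
    //     k = k->super_class_;
    //     if (k == nullptr) goto done;  // `out` holds the null, i.e. 0.
    //   } while (k != cls);
    //   out = 1;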

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Do an exact check.
      vixl::aarch64::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
      __ Bind(&exact_check);
      __ Mov(out, 1);
      __ B(&done);
      break;
    }
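    // In other words (sketch): `obj instanceof cls` holds here iff
    //   obj->klass_ == cls, or
    //   obj->klass_ is an array class (component_type_ != null) whose
    //   component type is a reference type (primitive_type_ == kPrimNot == 0).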

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004076
Calin Juravle98893e12015-10-02 21:05:03 +01004077 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004078 case TypeCheckKind::kInterfaceCheck: {
4079 // Note that we indeed only call on slow path, but we always go
4080 // into the slow path for the unresolved and interface check
4081 // cases.
4082 //
4083 // We cannot directly call the InstanceofNonTrivial runtime
4084 // entry point without resorting to a type checking slow path
4085 // here (i.e. by calling InvokeRuntime directly), as it would
4086 // require to assign fixed registers for the inputs of this
4087 // HInstanceOf instruction (following the runtime calling
4088 // convention), which might be cluttered by the potential first
4089 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00004090 //
4091 // TODO: Introduce a new runtime entry point taking the object
4092 // to test (instead of its class) as argument, and let it deal
4093 // with the read barrier issues. This will let us refactor this
4094 // case of the `switch` code as it was previously (with a direct
4095 // call to the runtime not using a type checking slow path).
4096 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004097 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01004098 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4099 instruction, /* is_fatal */ false);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004100 codegen_->AddSlowPath(slow_path);
4101 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004102 if (zero.IsLinked()) {
4103 __ B(&done);
4104 }
4105 break;
4106 }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, out);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
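
    // A sketch of the bitstring idea (assuming the details of the encoding):
    // each class carries a bitstring encoding its path from the root of the
    // class hierarchy, kept in the low bits of the class status word.
    // `obj instanceof cls` then reduces to extracting those bits from
    // obj->klass_ and comparing them against constant expected bits under a
    // constant mask (the constant inputs 1-3 reserved in the locations
    // builder), which GenerateBitstringTypeCheckCompare emits as a single
    // flag-setting compare.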
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ Mov(out, 0);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}

void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? Register()
      : InputRegisterAt(instruction, 1);
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_GE(num_temps, 1u);
  DCHECK_LE(num_temps, 3u);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCodeARM64* type_check_slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::aarch64::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, compare classes.
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Do an exact check.
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is null, jump to the slow path to throw the exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array. Further check that this component type is not a
      // primitive type.
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require
      // assigning fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    case TypeCheckKind::kInterfaceCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
      // Loop through the iftable and check if any class matches.
      vixl::aarch64::Label start_loop;
      __ Bind(&start_loop);
      __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
      __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
      // Go to next interface.
      __ Add(temp, temp, 2 * kHeapReferenceSize);
      __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
      // Compare the classes and continue the loop if they do not match.
      __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
      __ B(ne, &start_loop);
      break;
    }
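    // Roughly, the interface check above performs (sketch):
    //   p = address of obj->klass_->iftable_'s first (interface, methods) pair;
    //   n = iftable->length_;          // Total slots; two slots per pair.
    //   while (true) {
    //     if (n == 0) goto slow_path;  // Exhausted: throw.
    //     k = p->interface;  p += 2 slots;  n -= 2;
    //     if (k == cls) break;         // The interface is implemented.
    //   }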

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, temp);
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    {
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      // /* HeapReference<Class> */ temp = temp->klass_
      __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
      codegen_->MaybeRecordImplicitNullCheck(invoke);
    }
  } else {
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
    codegen_->MaybeRecordImplicitNullCheck(invoke);
  }

  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  __ Ldr(temp,
         MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kArm64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));

  {
    // Ensure the pc position is recorded immediately after the `blr` instruction.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);

    // lr();
    __ blr(lr);
    DCHECK(!codegen_->IsLeafMethod());
    codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  }

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}
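
// A sketch of the interface dispatch sequence emitted above, with symbolic
// registers (poisoning and the implicit null check elided):
//   mov ip1, #dex_method_index           // Hidden argument for the conflict trampoline.
//   ldr wT, [x_receiver, #class_offset]  // T = receiver->klass_
//   ldr xT, [xT, #imt_ptr_offset]        // T = klass->imt_
//   ldr xT, [xT, #imt_index * 8]         // T = imt_[imt_index] (an ArtMethod*).
//   ldr lr, [xT, #entry_point_offset]
//   blr lr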

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorARM64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      uint32_t offset =
          GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Add ADRP with its PC-relative method patch.
      vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add ADD with its PC-relative method patch.
      vixl::aarch64::Label* add_label =
          NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
      EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
      uint32_t boot_image_offset = GetBootImageOffset(invoke);
      vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add LDR with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Add ADRP with its PC-relative .bss entry patch.
      MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
      vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add LDR with its PC-relative .bss entry patch.
      vixl::aarch64::Label* ldr_label =
          NewMethodBssEntryPatch(target_method, adrp_label);
      EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      {
        // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
        ExactAssemblyScope eas(GetVIXLAssembler(),
                               kInstructionSize,
                               CodeBufferCheckScope::kExactSize);
        __ bl(&frame_entry_label_);
        RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      }
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
      {
        // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
        ExactAssemblyScope eas(GetVIXLAssembler(),
                               kInstructionSize,
                               CodeBufferCheckScope::kExactSize);
        // lr()
        __ blr(lr);
        RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      }
      break;
  }

  DCHECK(!IsLeafMethod());
}
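
// A sketch of what the kBootImageLinkTimePcRelative case above expands to once
// the linker resolves the placeholder patches:
//   adrp xT, <method>@page          // xT = 4 KiB page containing the ArtMethod*.
//   add  xT, xT, <method>@pageoff   // xT = exact address within that page.
// The kBootImageRelRo and kBssEntry cases emit the same ADRP but follow it
// with an LDR from the patched page offset instead of an ADD.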

void CodeGeneratorARM64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  DCHECK(receiver.IsRegister());

  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
    MaybeRecordImplicitNullCheck(invoke);
  }
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  {
    // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
    // lr();
    __ blr(lr);
    RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
  }
}
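
// A sketch of the virtual dispatch sequence emitted above, with symbolic
// registers (reference unpoisoning elided):
//   ldr wT, [x_receiver, #class_offset]    // T = receiver->klass_
//   ldr xT, [xT, #vtable_entry_offset]     // T = klass->embedded vtable[index].
//   ldr lr, [xT, #entry_point_offset]      // lr = method->entry_point_
//   blr lr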
4634
Orion Hodsonac141392017-01-13 11:53:47 +00004635void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4636 HandleInvoke(invoke);
4637}
4638
4639void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4640 codegen_->GenerateInvokePolymorphicCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004641 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Orion Hodsonac141392017-01-13 11:53:47 +00004642}
4643
Orion Hodson4c8e12e2018-05-18 08:33:20 +01004644void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4645 HandleInvoke(invoke);
4646}
4647
4648void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4649 codegen_->GenerateInvokeCustomCall(invoke);
4650 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
4651}
4652
vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
    uint32_t intrinsic_data,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      /* dex_file */ nullptr, intrinsic_data, adrp_label, &boot_image_intrinsic_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
    uint32_t boot_image_offset,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      /* dex_file */ nullptr, boot_image_offset, adrp_label, &boot_image_method_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
    MethodReference target_method,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
    MethodReference target_method,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
}

void CodeGeneratorARM64::EmitBakerReadBarrierCbnz(uint32_t custom_data) {
  ExactAssemblyScope guard(GetVIXLAssembler(), 1 * vixl::aarch64::kInstructionSize);
  if (Runtime::Current()->UseJitCompilation()) {
    auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
    vixl::aarch64::Label* slow_path_entry = &it->second.label;
    __ cbnz(mr, slow_path_entry);
  } else {
    baker_read_barrier_patches_.emplace_back(custom_data);
    vixl::aarch64::Label* cbnz_label = &baker_read_barrier_patches_.back().label;
    __ bind(cbnz_label);
    __ cbnz(mr, static_cast<int64_t>(0));  // Placeholder, patched at link-time.
  }
}

vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
    const DexFile* dex_file,
    uint32_t offset_or_index,
    vixl::aarch64::Label* adrp_label,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  // Add a patch entry and return the label.
  patches->emplace_back(dex_file, offset_or_index);
  PcRelativePatchInfo* info = &patches->back();
  vixl::aarch64::Label* label = &info->label;
  // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
  info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
  return label;
}
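
// Typical usage (sketch): the first call creates the ADRP patch and returns
// its label; passing that label back in ties the dependent ADD or LDR patch to
// the same ADRP, letting the linker compute the page and page-offset parts of
// one address:
//   vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(target_method);
//   EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
//   vixl::aarch64::Label* add_label =
//       NewBootImageMethodPatch(target_method, adrp_label);
//   EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));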
4737
Scott Wakeling97c72b72016-06-24 16:19:36 +01004738vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4739 uint64_t address) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004740 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004741}
4742
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004743vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004744 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004745 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004746 return jit_string_patches_.GetOrCreate(
4747 StringReference(&dex_file, string_index),
4748 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4749}
4750
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004751vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004752 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004753 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004754 return jit_class_patches_.GetOrCreate(
4755 TypeReference(&dex_file, type_index),
4756 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4757}
4758
void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
                                             vixl::aarch64::Register reg) {
  DCHECK(reg.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
}

void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
                                            vixl::aarch64::Register out,
                                            vixl::aarch64::Register base) {
  DCHECK(out.IsX());
  DCHECK(base.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ add(out, base, Operand(/* offset placeholder */ 0));
}

void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
                                                  vixl::aarch64::Register out,
                                                  vixl::aarch64::Register base) {
  DCHECK(base.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
}

void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
                                              uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    // Add ADRP with its PC-relative intrinsic patch.
    vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
    EmitAdrpPlaceholder(adrp_label, reg.X());
    // Add ADD with its PC-relative intrinsic patch.
    vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
    EmitAddPlaceholder(add_label, reg.X(), reg.X());
  } else if (Runtime::Current()->IsAotCompiler()) {
    // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
    vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
    EmitAdrpPlaceholder(adrp_label, reg.X());
    // Add LDR with its PC-relative .data.bimg.rel.ro patch.
    vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
    EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
  } else {
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
  }
}

void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
                                                      uint32_t boot_image_offset) {
  DCHECK(invoke->IsStatic());
  InvokeRuntimeCallingConvention calling_convention;
  Register argument = calling_convention.GetRegisterAt(0);
  if (GetCompilerOptions().IsBootImage()) {
    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
    MethodReference target_method = invoke->GetTargetMethod();
    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
    // Add ADRP with its PC-relative type patch.
    vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
    EmitAdrpPlaceholder(adrp_label, argument.X());
    // Add ADD with its PC-relative type patch.
    vixl::aarch64::Label* add_label =
        NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
    EmitAddPlaceholder(add_label, argument.X(), argument.X());
  } else {
    LoadBootImageAddress(argument, boot_image_offset);
  }
  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}

Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004835template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00004836inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4837 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004838 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004839 for (const PcRelativePatchInfo& info : infos) {
4840 linker_patches->push_back(Factory(info.label.GetLocation(),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004841 info.target_dex_file,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004842 info.pc_insn_label->GetLocation(),
4843 info.offset_or_index));
4844 }
4845}

template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
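
// NoDexFileAdapter lets the patch factories that take no dex file plug into
// EmitPcRelativeLinkerPatches() above, which expects the four-argument shape;
// for example, EmitLinkerPatches() below instantiates
// NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch> and
// NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>.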

void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_intrinsic_patches_.size() +
      baker_read_barrier_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_intrinsic_patches_, linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
    DCHECK(boot_image_intrinsic_patches_.empty());
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
    linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
        info.label.GetLocation(), info.custom_data));
  }
  DCHECK_EQ(size, linker_patches->size());
}

bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
  return patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
         patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
}

void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
                                       /*out*/ ArenaVector<uint8_t>* code,
                                       /*out*/ std::string* debug_name) {
  Arm64Assembler assembler(GetGraph()->GetAllocator());
  switch (patch.GetType()) {
    case linker::LinkerPatch::Type::kCallRelative: {
      // The thunk just uses the entry point in the ArtMethod. This works even for calls
      // to the generic JNI and interpreter trampolines.
      Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
          kArm64PointerSize).Int32Value());
      assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
      if (GetCompilerOptions().GenerateAnyDebugInfo()) {
        *debug_name = "MethodCallThunk";
      }
      break;
    }
    case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
      DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
      CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected patch type " << patch.GetType();
      UNREACHABLE();
  }

  // Ensure we emit the literal pool if any.
  assembler.FinalizeCode();
  code->resize(assembler.CodeSize());
  MemoryRegion code_region(code->data(), code->size());
  assembler.FinalizeInstructions(code_region);
}
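
// Assuming JumpTo() expands to a load of the entry point followed by an
// indirect branch, the shared method call thunk is roughly:
//
//   ldr ip0, [x0, #entry_point_offset]  ; x0 holds the target ArtMethod*
//   br  ip0
//
// which is why it also covers targets whose "code" is the generic JNI or
// interpreter trampoline: those are just different entry points.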

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
  return uint32_literals_.GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
}

vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
}
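
// GetOrCreate() returns the already-created pool literal when the same value
// is requested again, so all loads of one 32- or 64-bit constant in a method
// share a single literal pool slot; the DeduplicateBootImageAddressLiteral()
// calls in the load paths below presumably route through these helpers, so
// repeated boot image addresses are pooled the same way.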

void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
    return;
  }

  {
    // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
    // are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
    LocationSummary* locations = invoke->GetLocations();
    codegen_->GenerateStaticOrDirectCall(
        invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
  }

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
    return;
  }

  {
    // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
    // are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
    codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
    DCHECK(!codegen_->IsLeafMethod());
  }

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}
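
// Both invoke visitors wrap the call in an EmissionCheckScope because VIXL is
// otherwise free to emit literal pools or veneers between macro-instructions;
// a pool landing between the BLR and the RecordPcInfo done inside the call
// generators would make the recorded PC disagree with the actual return
// address used for stack map lookup.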

HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;
  }
  return desired_class_load_kind;
}

void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        LocationFrom(calling_convention.GetRegisterAt(0)),
        LocationFrom(vixl::aarch64::x0));
    DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }

  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}

// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  Location out_loc = cls->GetLocations()->Out();
  Register out = OutputRegister(cls);

  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      Register current_method = InputRegisterAt(cls, 0);
      codegen_->GenerateGcRootFieldLoad(cls,
                                        out_loc,
                                        current_method,
                                        ArtMethod::DeclaringClassOffset().Int32Value(),
                                        /* fixup_label */ nullptr,
                                        read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Add ADRP with its PC-relative type patch.
      const DexFile& dex_file = cls->GetDexFile();
      dex::TypeIndex type_index = cls->GetTypeIndex();
      vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add ADD with its PC-relative type patch.
      vixl::aarch64::Label* add_label =
          codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
      codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
      // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add LDR with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
      codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Add ADRP with its PC-relative Class .bss entry patch.
      const DexFile& dex_file = cls->GetDexFile();
      dex::TypeIndex type_index = cls->GetTypeIndex();
      vixl::aarch64::Register temp = XRegisterFrom(out_loc);
      vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, temp);
      // Add LDR with its PC-relative Class .bss entry patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
      // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
      codegen_->GenerateGcRootFieldLoad(cls,
                                        out_loc,
                                        temp,
                                        /* offset placeholder */ 0u,
                                        ldr_label,
                                        read_barrier_option);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                       cls->GetTypeIndex(),
                                                       cls->GetClass()));
      codegen_->GenerateGcRootFieldLoad(cls,
                                        out_loc,
                                        out.X(),
                                        /* offset */ 0,
                                        /* fixup_label */ nullptr,
                                        read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  bool do_clinit = cls->MustGenerateClinitCheck();
  if (generate_null_check || do_clinit) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeARM64* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Cbz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
    codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
  }
}
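
// Recap of the kBssEntry case above: the ADRP+LDR placeholder pair loads the
// GcRoot<mirror::Class> from the type's .bss slot. A zero result means the
// class is not resolved yet, so the Cbz branches to LoadClassSlowPathARM64,
// whose runtime call resolves (and, when required, initializes) the class;
// once the .bss entry is populated, later executions stay on the fast path.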

void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  InvokeRuntimeCallingConvention calling_convention;
  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
  CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
}

void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}

void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
  InvokeRuntimeCallingConvention calling_convention;
  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
  CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
}

void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}

static MemOperand GetExceptionTlsAddress() {
  return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
}

void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
  __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
}

void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ Str(wzr, GetExceptionTlsAddress());
}
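
// Loading and clearing the pending exception are single instructions because
// the field sits at a fixed offset from the reserved thread register (tr),
// roughly:
//
//   ldr wOut, [tr, #exception_offset]  ; VisitLoadException
//   str wzr,  [tr, #exception_offset]  ; VisitClearException (wzr is zero)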
5206
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005207HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
5208 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005209 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005210 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005211 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00005212 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005213 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005214 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005215 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005216 case HLoadString::LoadKind::kJitTableAddress:
5217 DCHECK(Runtime::Current()->UseJitCompilation());
5218 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005219 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005220 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005221 }
5222 return desired_string_load_kind;
5223}

void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}
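
// Design note on the kBssEntry branch above: the resolution slow path uses a
// SaveEverything runtime calling convention, so instead of the usual
// caller-save mask the summary only needs to reserve the register holding the
// reference output (OneRegInReferenceOutSaveEverythingCallerSaves); the
// rarely-taken call then imposes no extra register pressure on the fast path.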

// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  Register out = OutputRegister(load);
  Location out_loc = load->GetLocations()->Out();

  switch (load->GetLoadKind()) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // Add ADRP with its PC-relative String patch.
      const DexFile& dex_file = load->GetDexFile();
      const dex::StringIndex string_index = load->GetStringIndex();
      vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add ADD with its PC-relative String patch.
      vixl::aarch64::Label* add_label =
          codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
      codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
      vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
      // Add LDR with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
      codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      // Add ADRP with its PC-relative String .bss entry patch.
      const DexFile& dex_file = load->GetDexFile();
      const dex::StringIndex string_index = load->GetStringIndex();
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      Register temp = XRegisterFrom(out_loc);
      vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
      codegen_->EmitAdrpPlaceholder(adrp_label, temp);
      // Add LDR with its PC-relative String .bss entry patch.
      vixl::aarch64::Label* ldr_label =
          codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
      // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
      codegen_->GenerateGcRootFieldLoad(load,
                                        out_loc,
                                        temp,
                                        /* offset placeholder */ 0u,
                                        ldr_label,
                                        kCompilerReadBarrierOption);
      SlowPathCodeARM64* slow_path =
          new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
      codegen_->AddSlowPath(slow_path);
      __ Cbz(out.X(), slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
      return;
    }
    case HLoadString::LoadKind::kJitBootImageAddress: {
      uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
      DCHECK_NE(address, 0u);
      __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
                                                        load->GetStringIndex(),
                                                        load->GetString()));
      codegen_->GenerateGcRootFieldLoad(load,
                                        out_loc,
                                        out.X(),
                                        /* offset */ 0,
                                        /* fixup_label */ nullptr,
                                        kCompilerReadBarrierOption);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
  __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
                          instruction,
                          instruction->GetDexPc());
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
  switch (neg->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(LocationFrom(x0));
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
  __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
}
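
// EOR with immediate 1 is enough for boolean negation because an HBooleanNot
// input is guaranteed to be 0 or 1: 0 ^ 1 == 1 and 1 ^ 1 == 0. A full bitwise
// NOT (Mvn, used by HNot above) would produce -1 or -2 instead.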

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    Location obj = instruction->GetLocations()->InAt(0);
    __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
    RecordPcInfo(instruction, instruction->GetDexPc());
  }
}

void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
  AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
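
// Null checks come in two flavors, selected by GenerateNullCheck(): the
// implicit form above dereferences [obj, #0] into wzr, so a null object
// faults and the fault handler maps the PC recorded by RecordPcInfo back to
// this check to raise the NullPointerException; the explicit form tests the
// register with Cbz and branches to NullCheckSlowPathARM64 instead.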

void LocationsBuilderARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(
    HParameterValue* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(LocationFrom(kArtMethodRegister));
}

void InstructionCodeGeneratorARM64::VisitCurrentMethod(
    HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                          : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;

  Register out = OutputRegister(instruction);
  Register dividend = InputRegisterAt(instruction, 0);

  if (abs_imm == 2) {
    __ Cmp(dividend, 0);
    __ And(out, dividend, 1);
    __ Csneg(out, out, out, ge);
  } else {
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireSameSizeAs(out);

    __ Negs(temp, dividend);
    __ And(out, dividend, abs_imm - 1);
    __ And(temp, temp, abs_imm - 1);
    __ Csneg(out, out, temp, mi);
  }
}
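
// Worked example for the general branch above, with imm == 4 (mask 3) and
// dividend == -7: Negs sets temp to 7 and the flags from that result; then
// out = -7 & 3 == 1 and temp = 7 & 3 == 3. Since Negs produced a positive
// value, "mi" is false and Csneg yields out = -temp == -3, matching
// -7 % 4 == -3 under truncated-division semantics. For a positive dividend
// "mi" is true and the plain masked value in out is kept.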

void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem* instruction) {
  int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));

  if (imm == 0) {
    // Do not generate anything.
    // DivZeroCheck would prevent any code from being executed.
    return;
  }

  if (IsPowerOfTwo(AbsOrMin(imm))) {
    // The cases imm == -1 and imm == 1 are handled in constant folding by
    // InstructionWithAbsorbingInputSimplifier.
    // If they have survived till code generation, they are handled in
    // GenerateIntRemForPower2Denom because -1 and 1 are powers of 2 (2^0).
    // The correct code is generated for them, just with more instructions.
    GenerateIntRemForPower2Denom(instruction);
  } else {
    DCHECK(imm < -2 || imm > 2) << imm;
    GenerateDivRemWithAnyConstant(instruction);
  }
}

void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
  DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
      << instruction->GetResultType();

  if (instruction->GetLocations()->InAt(1).IsConstant()) {
    GenerateIntRemForConstDenom(instruction);
  } else {
    Register out = OutputRegister(instruction);
    Register dividend = InputRegisterAt(instruction, 0);
    Register divisor = InputRegisterAt(instruction, 1);
    UseScratchRegisterScope temps(GetVIXLAssembler());
    Register temp = temps.AcquireSameSizeAs(out);
    __ Sdiv(temp, dividend, divisor);
    __ Msub(out, temp, divisor, dividend);
  }
}
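
// The non-constant path uses the standard identity
//   rem == dividend - (dividend / divisor) * divisor
// where Sdiv computes the truncated quotient and Msub folds the multiply and
// subtract into one instruction: out == dividend - temp * divisor.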

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GenerateIntRem(rem);
      break;
    }

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      QuickEntrypointEnum entrypoint =
          (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
      if (type == DataType::Type::kFloat32) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM64::VisitMin(HMin* min) {
  HandleBinaryOp(min);
}

void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
  HandleBinaryOp(min);
}

void LocationsBuilderARM64::VisitMax(HMax* max) {
  HandleBinaryOp(max);
}

void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
  HandleBinaryOp(max);
}
5730
Aart Bik3dad3412018-02-28 12:01:46 -08005731void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
5732 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
5733 switch (abs->GetResultType()) {
5734 case DataType::Type::kInt32:
5735 case DataType::Type::kInt64:
5736 locations->SetInAt(0, Location::RequiresRegister());
5737 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5738 break;
5739 case DataType::Type::kFloat32:
5740 case DataType::Type::kFloat64:
5741 locations->SetInAt(0, Location::RequiresFpuRegister());
5742 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5743 break;
5744 default:
5745 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5746 }
5747}
5748
5749void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
5750 switch (abs->GetResultType()) {
5751 case DataType::Type::kInt32:
5752 case DataType::Type::kInt64: {
5753 Register in_reg = InputRegisterAt(abs, 0);
5754 Register out_reg = OutputRegister(abs);
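      // Conditionally negate: out = (in < 0) ? -in : in.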
      __ Cmp(in_reg, Operand(0));
      __ Cneg(out_reg, in_reg, lt);
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FPRegister in_reg = InputFPRegisterAt(abs, 0);
      FPRegister out_reg = OutputFPRegister(abs);
      __ Fabs(out_reg, in_reg);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
  }
}

void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  DataType::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
  HandleBinaryOp(ror);
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
  DataType::Type input_type = conversion->GetInputType();
  DataType::Type result_type = conversion->GetResultType();
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;
  if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
      (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (DataType::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (DataType::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    int result_size = DataType::Size(result_type);
    int input_size = DataType::Size(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (DataType::IsUnsignedType(result_type) ||
               (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
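      // Unsigned result or unsigned widening: keep only the low `result_size`
      // bits of the source and clear the rest (UBFX zero-extends).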
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
    } else {
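      // Signed conversion: sign-extend the low `min_size` bits of the source.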
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Use 16 instructions as a rough upper bound on the average code size generated
  // per HIR instruction in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
  // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph
  // to make sure we don't emit it if the target may run out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold =
      1 * MB / kMaxExpectedSizePerHInstruction;

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
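    // Loop invariant: temp == value - lower_bound - last_index. Each iteration
    // subtracts 2 and uses the resulting flags to test the next two case values.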
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // Handle the last remaining case value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below need at most one scratch register at a time. Since
    // two scratch registers are blocked by VIXL, we are free to acquire one here.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // The current VIXL implementation does not require any scratch register to encode the
    // immediate value for Adr, so we are free to use both VIXL scratch registers here to
    // reduce register pressure.
    Register table_base = temps.AcquireX();
    // Load jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
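    // Each jump table entry is a 32-bit offset relative to the table start, hence
    // the UXTW #2 scaling above and the SXTW sign-extension below.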

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  DataType::Type type = DataType::Type::kReference;
  Register out_reg = RegisterFrom(out, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  DataType::Type type = DataType::Type::kReference;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void CodeGeneratorARM64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    Register obj,
    uint32_t offset,
    vixl::aarch64::Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  DCHECK(fixup_label == nullptr || offset == 0u);
  Register root_reg = RegisterFrom(root, DataType::Type::kReference);
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used.

      // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
      // the Marking Register) to decide whether we need to enter
      // the slow path to mark the GC root.
      //
      // We use shared thunks for the slow path; shared within the method
      // for JIT, across methods for AOT. That thunk checks the reference
      // and jumps to the entrypoint if needed.
      //
      //     lr = &return_address;
      //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
      //     if (mr) {  // Thread::Current()->GetIsGcMarking()
      //       goto gc_root_thunk<root_reg>(lr)
      //     }
      //   return_address:

      UseScratchRegisterScope temps(GetVIXLAssembler());
      DCHECK(temps.IsAvailable(ip0));
      DCHECK(temps.IsAvailable(ip1));
      temps.Exclude(ip0, ip1);
      uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());

      ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
      vixl::aarch64::Label return_address;
      __ adr(lr, &return_address);
      if (fixup_label != nullptr) {
        __ bind(fixup_label);
      }
      static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
                    "GC root LDR must be 2 instructions (8B) before the return address label.");
      __ ldr(root_reg, MemOperand(obj.X(), offset));
      EmitBakerReadBarrierCbnz(custom_data);
      __ bind(&return_address);
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
      }
      // /* mirror::Object* */ root = root->Read()
      GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t offset,
                                                               Location maybe_temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (!use_load_acquire) {
    // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
    // Marking Register) to decide whether we need to enter the slow
    // path to mark the reference. Then, in the slow path, check the
    // gray bit in the lock word of the reference's holder (`obj`) to
    // decide whether to mark `ref` or not.
    //
    // We use shared thunks for the slow path; shared within the method
    // for JIT, across methods for AOT. That thunk checks the holder
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it creates a fake dependency and returns to the LDR instruction.
    //
    //     lr = &gray_return_address;
    //     if (mr) {  // Thread::Current()->GetIsGcMarking()
    //       goto field_thunk<holder_reg, base_reg>(lr)
    //     }
    //   not_gray_return_address:
    //     // Original reference load. If the offset is too large to fit
    //     // into LDR, we use an adjusted base register here.
    //     HeapReference<mirror::Object> reference = *(obj+offset);
    //   gray_return_address:

    DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
    Register base = obj;
    if (offset >= kReferenceLoadMinFarOffset) {
      DCHECK(maybe_temp.IsRegister());
      base = WRegisterFrom(maybe_temp);
      static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
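      // Fold the aligned high part of the offset into the base register so that
      // the low part left in `offset` fits the LDR immediate below.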
      __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
      offset &= (kReferenceLoadMinFarOffset - 1u);
    }
    UseScratchRegisterScope temps(GetVIXLAssembler());
    DCHECK(temps.IsAvailable(ip0));
    DCHECK(temps.IsAvailable(ip1));
    temps.Exclude(ip0, ip1);
    uint32_t custom_data = EncodeBakerReadBarrierFieldData(base.GetCode(), obj.GetCode());

    {
      ExactAssemblyScope guard(GetVIXLAssembler(),
                               (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
      vixl::aarch64::Label return_address;
      __ adr(lr, &return_address);
      EmitBakerReadBarrierCbnz(custom_data);
      static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                    "Field LDR must be 1 instruction (4B) before the return address label; "
                    " 2 instructions (8B) for heap poisoning.");
      Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
      __ ldr(ref_reg, MemOperand(base.X(), offset));
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
      // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
      // macro instructions disallowed in ExactAssemblyScope.
      if (kPoisonHeapReferences) {
        __ neg(ref_reg, Operand(ref_reg));
      }
      __ bind(&return_address);
    }
    MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
    return;
  }

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Register temp = WRegisterFrom(maybe_temp);
  Location no_index = Location::NoLocation();
  size_t no_scale_factor = 0u;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check,
                                            use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(Location ref,
                                                               Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to mark the reference. Then, in the slow path, check the
  // gray bit in the lock word of the reference's holder (`obj`) to
  // decide whether to mark `ref` or not.
  //
  // We use shared thunks for the slow path; shared within the method
  // for JIT, across methods for AOT. That thunk checks the holder
  // and jumps to the entrypoint if needed. If the holder is not gray,
  // it creates a fake dependency and returns to the LDR instruction.
  //
  //     lr = &gray_return_address;
  //     if (mr) {  // Thread::Current()->GetIsGcMarking()
  //       goto array_thunk<base_reg>(lr)
  //     }
  //   not_gray_return_address:
  //     // Original reference load. If the offset is too large to fit
  //     // into LDR, we use an adjusted base register here.
  //     HeapReference<mirror::Object> reference = data[index];
  //   gray_return_address:

  DCHECK(index.IsValid());
  Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
  Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  DCHECK(temps.IsAvailable(ip0));
  DCHECK(temps.IsAvailable(ip1));
  temps.Exclude(ip0, ip1);
  uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());

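  // Compute the array data address into `temp` so the LDR below can use a
  // (base + scaled index) addressing mode.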
  __ Add(temp.X(), obj.X(), Operand(data_offset));
  {
    ExactAssemblyScope guard(GetVIXLAssembler(),
                             (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
    vixl::aarch64::Label return_address;
    __ adr(lr, &return_address);
    EmitBakerReadBarrierCbnz(custom_data);
    static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                  "Array LDR must be 1 instruction (4B) before the return address label; "
                  " 2 instructions (8B) for heap poisoning.");
    __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
    DCHECK(!needs_null_check);  // The thunk cannot handle the null check.
    // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
    // macro instructions disallowed in ExactAssemblyScope.
    if (kPoisonHeapReferences) {
      __ neg(ref_reg, Operand(ref_reg));
    }
    __ bind(&return_address);
  }
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
}

void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                   Location ref,
                                                                   Register obj,
                                                                   uint32_t offset,
                                                                   Location index,
                                                                   size_t scale_factor,
                                                                   Register temp,
                                                                   bool needs_null_check,
                                                                   bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to mark the reference. Then, in the slow path, check the
  // gray bit in the lock word of the reference's holder (`obj`) to
  // decide whether to mark `ref` or not.
  //
  //   if (mr) {  // Thread::Current()->GetIsGcMarking()
  //     // Slow path.
  //     uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //     lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //     HeapReference<mirror::Object> ref = *src;  // Original reference load.
  //     bool is_gray = (rb_state == ReadBarrier::GrayState());
  //     if (is_gray) {
  //       entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
  //       ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
  //     }
  //   } else {
  //     HeapReference<mirror::Object> ref = *src;  // Original reference load.
  //   }

  // Slow path marking the object `ref` when the GC is marking. The
  // entrypoint will be loaded by the slow path code.
  SlowPathCodeARM64* slow_path =
      new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierSlowPathARM64(
          instruction,
          ref,
          obj,
          offset,
          index,
          scale_factor,
          needs_null_check,
          use_load_acquire,
          temp);
  AddSlowPath(slow_path);

  __ Cbnz(mr, slow_path->GetEntryLabel());
  // Fast path: the GC is not marking: just load the reference.
  GenerateRawReferenceLoad(
      instruction, ref, obj, offset, index, scale_factor, needs_null_check, use_load_acquire);
  __ Bind(slow_path->GetExitLabel());
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void CodeGeneratorARM64::UpdateReferenceFieldWithBakerReadBarrier(HInstruction* instruction,
                                                                  Location ref,
                                                                  Register obj,
                                                                  Location field_offset,
                                                                  Register temp,
                                                                  bool needs_null_check,
                                                                  bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to update the reference field within `obj`. Then, in the
  // slow path, check the gray bit in the lock word of the reference's
  // holder (`obj`) to decide whether to mark `ref` and update the
  // field or not.
  //
  //   if (mr) {  // Thread::Current()->GetIsGcMarking()
  //     // Slow path.
  //     uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //     lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //     HeapReference<mirror::Object> ref = *(obj + field_offset);  // Reference load.
  //     bool is_gray = (rb_state == ReadBarrier::GrayState());
  //     if (is_gray) {
  //       old_ref = ref;
  //       entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
  //       ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
  //       compareAndSwapObject(obj, field_offset, old_ref, ref);
  //     }
  //   }

  // Slow path updating the object reference at address `obj + field_offset`
  // when the GC is marking. The entrypoint will be loaded by the slow path code.
  SlowPathCodeARM64* slow_path =
      new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
          instruction,
          ref,
          obj,
          /* offset */ 0u,
          /* index */ field_offset,
          /* scale_factor */ 0u /* "times 1" */,
          needs_null_check,
          use_load_acquire,
          temp);
  AddSlowPath(slow_path);

  __ Cbnz(mr, slow_path->GetEntryLabel());
  // Fast path: the GC is not marking: nothing to do (the field is
  // up-to-date, and we don't need to load the reference).
  __ Bind(slow_path->GetExitLabel());
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void CodeGeneratorARM64::GenerateRawReferenceLoad(HInstruction* instruction,
                                                  Location ref,
                                                  Register obj,
                                                  uint32_t offset,
                                                  Location index,
                                                  size_t scale_factor,
                                                  bool needs_null_check,
                                                  bool use_load_acquire) {
  DCHECK(obj.IsW());
  DataType::Type type = DataType::Type::kReference;
  Register ref_reg = RegisterFrom(ref, type);

  // If needed, vixl::EmissionCheckScope guards are used to ensure
  // that no pools are emitted between the load (macro) instruction
  // and MaybeRecordImplicitNullCheck.

  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0u);
      DCHECK_EQ(scale_factor, 0u);
      DCHECK_EQ(needs_null_check, false);
      // /* HeapReference<mirror::Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject and UnsafeCASObject intrinsics cases.
      // /* HeapReference<mirror::Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64FromLocation(index) << scale_factor);
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        Register temp = temps.AcquireW();
        __ Add(temp, obj, offset);
        {
          EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
          Load(type, ref_reg, HeapOperand(temp, XRegisterFrom(index), LSL, scale_factor));
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
      }
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      // Implicit null checks are handled by CodeGeneratorARM64::LoadAcquire.
      LoadAcquire(instruction, ref_reg, field, needs_null_check);
    } else {
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      Load(type, ref_reg, field);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
}

void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
  // The following condition is a compile-time one, so it does not have a run-time cost.
  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
    // The following condition is a run-time one; it is executed after the
    // previous compile-time test, to avoid penalizing non-debug builds.
    if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
      GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
    }
  }
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
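    // Load the ImTable pointer from the class, then the method entry from it.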
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

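// Patch the 32-bit placeholder literal so it holds the address of the root's
// entry in the JIT roots table (the address must fit in 32 bits, as enforced
// by the dchecked cast below).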
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006732static void PatchJitRootUse(uint8_t* code,
6733 const uint8_t* roots_data,
6734 vixl::aarch64::Literal<uint32_t>* literal,
6735 uint64_t index_in_table) {
6736 uint32_t literal_offset = literal->GetOffset();
6737 uintptr_t address =
6738 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
6739 uint8_t* data = code + literal_offset;
6740 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
6741}
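
// The 32-bit literal patched here holds the address of a GcRoot<> slot in
// the JIT root table; the compiled code materializes that address with a
// literal-pool load and then dereferences it. Roughly (illustrative):
//
//   ldr w0, <literal>  // Address of the root table entry.
//   ldr w0, [x0]       // The root itself (a compressed reference).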

void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}
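
// Note: the JIT invokes this only once the final placement of `code` and
// `roots_data` is known, since the root table addresses cannot be resolved
// while the method is still being compiled; the patch sites were recorded
// in jit_string_patches_ and jit_class_patches_ during code generation.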

#undef __
#undef QUICK_ENTRY_POINT

#define __ assembler.GetVIXLAssembler()->

static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
                                     vixl::aarch64::Register base_reg,
                                     vixl::aarch64::MemOperand& lock_word,
                                     vixl::aarch64::Label* slow_path,
                                     vixl::aarch64::Label* throw_npe = nullptr) {
  // Load the lock word containing the rb_state.
  __ Ldr(ip0.W(), lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
  static_assert(
      BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
      "Field and array LDR offsets must be the same to reuse the same code.");
  // To throw NPE, we return to the fast path; the artificial dependence below does not matter.
  if (throw_npe != nullptr) {
    __ Bind(throw_npe);
  }
  // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
  static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                "Field LDR must be 1 instruction (4B) before the return address label; "
                " 2 instructions (8B) for heap poisoning.");
  __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
  __ Br(lr);  // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}
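
// Fast-path shape emitted above (illustrative):
//
//   ldr  wip0, [holder, #monitor_offset]    // Lock word with rb_state.
//   tbnz wip0, #rb_state_shift, slow_path   // Gray? -> mark via slow path.
//   add  lr, lr, #ldr_offset                // Re-point lr at the LDR.
//   add  base, base, xip0, lsr #32          // Adds 0, but makes the reload
//                                           // depend on the lock word load.
//   br   lr                                 // Re-execute the original LDR.
//
// The `LSR #32` operand is guaranteed to be zero because the lock word was
// loaded as a W register, which clears the upper 32 bits of xip0.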

// Load the read barrier introspection entrypoint in register `entrypoint`.
static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
                                                       vixl::aarch64::Register entrypoint) {
  // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip0.GetCode(), 16u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
}

void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
                                                      uint32_t encoded_data,
                                                      /*out*/ std::string* debug_name) {
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      auto holder_reg =
          Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      // If base_reg differs from holder_reg, the offset was too large and we must have emitted
      // an explicit null check before the load. Otherwise, for implicit null checks, we need to
      // null-check the holder as we do not necessarily do that check before going to the thunk.
      vixl::aarch64::Label throw_npe_label;
      vixl::aarch64::Label* throw_npe = nullptr;
      if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
        throw_npe = &throw_npe_label;
        __ Cbz(holder_reg.W(), throw_npe);
      }
      // Check if the holder is gray and, if not, add fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint that performs further checks on the
      // reference and marks it if needed.
      vixl::aarch64::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
      __ Bind(&slow_path);
      MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
      __ Ldr(ip0.W(), ldr_address);  // Load the LDR (immediate) unsigned offset.
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      __ Ubfx(ip0.W(), ip0.W(), 10, 12);  // Extract the offset.
      __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2));  // Load the reference.
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Br(ip1);  // Jump to the entrypoint.
      break;
    }
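    // The slow path above decodes the original `LDR (immediate, unsigned
    // offset)` instruction to recover the field offset: bits [21:10] hold
    // imm12, which the hardware scales by the access size (4 bytes for a
    // W-sized load), hence the UBFX of 12 bits at position 10 followed by
    // the `LSL #2` in the addressing mode.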
    case BakerReadBarrierKind::kArray: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffset(), 0);
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
      __ Bind(&slow_path);
      MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
      __ Ldr(ip0.W(), ldr_address);  // Load the LDR (register) unsigned offset.
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      __ Ubfx(ip0, ip0, 16, 6);  // Extract the index register, plus 32 (bit 21 is set).
      __ Bfi(ip1, ip0, 3, 6);  // Insert ip0 to the entrypoint address to create
                               // a switch case target based on the index register.
      __ Mov(ip0, base_reg);  // Move the base register to ip0.
      __ Br(ip1);  // Jump to the entrypoint's array switch case.
      break;
    }
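    // `LDR (register)` encodes the index register Rm in bits [20:16], with
    // bit 21 fixed to 1, so the 6-bit UBFX at position 16 yields Rm + 32.
    // The BFI then writes that value into bits [8:3] of the entrypoint
    // address, i.e. it selects a switch case at an 8-byte stride, assuming
    // the entrypoint lays out one case per register code at that spacing.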
    case BakerReadBarrierKind::kGcRoot: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      auto root_reg =
          Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label return_label, not_marked, forwarding_address;
      __ Cbz(root_reg, &return_label);
      MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip0.W(), lock_word);
      __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
      __ Bind(&return_label);
      __ Br(lr);
      __ Bind(&not_marked);
      __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
      __ B(&forwarding_address, mi);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
      // art_quick_read_barrier_mark_introspection_gc_roots.
      __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
      __ Mov(ip0.W(), root_reg);
      __ Br(ip1);
      __ Bind(&forwarding_address);
      __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
      __ Br(lr);
      break;
    }
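    // `TST wip0, wip0, LSL #1` ANDs bit 31 of the lock word with bit 30 and
    // puts the result in the sign flag, so the MI branch is taken exactly
    // when both state bits are set, i.e. when the lock word holds a
    // forwarding address; the LSL by kForwardingAddressShift afterwards
    // reconstructs the address itself.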
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }

  // For JIT, the slow path is considered part of the compiled method,
  // so JIT should pass null as `debug_name`. Tests may not have a runtime.
  DCHECK(Runtime::Current() == nullptr ||
         !Runtime::Current()->UseJitCompilation() ||
         debug_name == nullptr);
  if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
    std::ostringstream oss;
    oss << "BakerReadBarrierThunk";
    switch (kind) {
      case BakerReadBarrierKind::kField:
        oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kArray:
        oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
      case BakerReadBarrierKind::kGcRoot:
        oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
    }
    *debug_name = oss.str();
  }
}

#undef __

}  // namespace arm64
}  // namespace art