/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data for a small num_entries.
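// For example, at the threshold of 7 entries the two are roughly the same size: about
// 1.5 * 7 + 3 ~= 14 instructions for the compare/jump sequence versus 7 instructions plus
// 7 literals (14 words) for the jump table.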
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// A reference load (except object array loads) uses LDR Wt, [Xn, #offset], which can handle
// offsets < 16KiB. For offsets >= 16KiB, the load has to be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks, we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;

// Some instructions have special requirements for a temporary. For example,
// LoadClass/kBssEntry and LoadString/kBssEntry for Baker read barrier require
// a temp that's not R0 (to avoid an extra move), and Baker read barrier field
// loads with large offsets need a fixed register to limit the number of link-time
// thunks we generate. For these and similar cases, we want to reserve a specific
// register that's neither callee-save nor an argument register. We choose x15.
inline Location FixedTempLocation() {
  return Location::RegisterLocation(x15.GetCode());
}

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
            RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
                         DataType::Type::kReference).GetCode());
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
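  // When the graph contains SIMD code, live vector registers are spilled as full 128-bit
  // Q registers; otherwise only their 64-bit D views need to be preserved.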
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (i.e. the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARM64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
      arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)),
                                  source,
                                  cls_->GetType());
    }
    if (must_do_clinit) {
      arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
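  // Each table entry is the 32-bit offset from the start of the table to the label of the
  // corresponding successor block.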
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Abstract base class for read barrier slow paths marking a reference
// `ref`.
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class ReadBarrierMarkSlowPathBaseARM64 : public SlowPathCodeARM64 {
 protected:
  ReadBarrierMarkSlowPathBaseARM64(HInstruction* instruction, Location ref, Location entrypoint)
      : SlowPathCodeARM64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathBaseARM64"; }

  // Generate assembly code calling the read barrier marking runtime
  // entry point (ReadBarrierMarkRegX).
  void GenerateReadBarrierMarkRuntimeCall(CodeGenerator* codegen) {
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary; it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
  }

  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathBaseARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). The field `obj.field` in the object `obj` holding
// this reference does not get updated by this slow path after marking
// (see LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
// below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class LoadReferenceWithBakerReadBarrierSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierSlowPathARM64(HInstruction* instruction,
                                                 Location ref,
                                                 Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 size_t scale_factor,
                                                 bool needs_null_check,
                                                 bool use_load_acquire,
                                                 Register temp,
                                                 Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // When using MaybeGenerateReadBarrierSlow, the read barrier call is
    // inserted after the original load. However, in fast path based
    // Baker's read barriers, we need to perform the load of
    // mirror::Object::monitor_ *before* the original reference load.
    // This load-load ordering is required by the read barrier.
    // The slow path (for Baker's algorithm) should look like:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //   }
    //
    // Note: the original implementation in ReadBarrier::Barrier is
    // slightly more complex as it performs additional checks that we do
    // not do here for performance reasons.

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);

    __ B(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  Register obj_;
  // The offset, index and scale factor to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  size_t scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // Should this reference load use Load-Acquire semantics?
  bool use_load_acquire_;
  // A temporary register used to hold the lock word of `obj_`.
  Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierSlowPathARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). If needed, this slow path also atomically updates
// the field `obj.field` in the object `obj` holding this reference
// after marking (contrary to
// LoadReferenceWithBakerReadBarrierSlowPathARM64 above, which never
// tries to update `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
    : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
      HInstruction* instruction,
      Location ref,
      Register obj,
      uint32_t offset,
      Location index,
      size_t scale_factor,
      bool needs_null_check,
      bool use_load_acquire,
      Register temp,
      Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());

    // This slow path is only used by the UnsafeCASObject intrinsic at the moment.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK_EQ(offset_, 0u);
    DCHECK_EQ(scale_factor_, 0u);
    DCHECK_EQ(use_load_acquire_, false);
    // The location of the offset of the marked reference field within `obj_`.
    Location field_offset = index_;
    DCHECK(field_offset.IsRegister()) << field_offset;

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // The implementation is similar to LoadReferenceWithBakerReadBarrierSlowPathARM64's:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     old_ref = ref;
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //     compareAndSwapObject(obj, field_offset, old_ref, ref);
    //   }

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());

    // Save the old value of the reference before marking it.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    GenerateReadBarrierMarkRuntimeCall(codegen);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, GetExitLabel());

988 // Update the holder's field atomically. This may fail if the
989 // mutator updates it before us, but it's OK. This is achieved
990 // using a strong compare-and-set (CAS) operation with relaxed
991 // memory synchronization ordering, where the expected value is
992 // the old reference and the desired value is the new reference.
993
994 MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
995 UseScratchRegisterScope temps(masm);
996
997 // Convenience aliases.
998 Register base = obj_.W();
Roland Levillain54f869e2017-03-06 13:54:11 +0000999 Register offset = XRegisterFrom(field_offset);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001000 Register expected = temp_.W();
1001 Register value = ref_reg;
1002 Register tmp_ptr = temps.AcquireX(); // Pointer to actual memory.
1003 Register tmp_value = temps.AcquireW(); // Value in memory.
1004
1005 __ Add(tmp_ptr, base.X(), Operand(offset));
1006
1007 if (kPoisonHeapReferences) {
1008 arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
1009 if (value.Is(expected)) {
1010 // Do not poison `value`, as it is the same register as
1011 // `expected`, which has just been poisoned.
1012 } else {
1013 arm64_codegen->GetAssembler()->PoisonHeapReference(value);
1014 }
1015 }
1016
1017 // do {
1018 //   tmp_value = [tmp_ptr];
1019 // } while (tmp_value == expected && failure([tmp_ptr] <- value));
1020
Roland Levillain24a4d112016-10-26 13:10:46 +01001021 vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
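    // Note (added for clarity): `tmp_value` is reused below to receive the
    // STXR status result, which is 0 on success and non-zero if the exclusive
    // store failed, so the CBNZ loops back only when the store must be retried.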
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001022 __ Bind(&loop_head);
1023 __ Ldxr(tmp_value, MemOperand(tmp_ptr));
1024 __ Cmp(tmp_value, expected);
Roland Levillain24a4d112016-10-26 13:10:46 +01001025 __ B(&comparison_failed, ne);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001026 __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
1027 __ Cbnz(tmp_value, &loop_head);
Roland Levillain24a4d112016-10-26 13:10:46 +01001028 __ B(&exit_loop);
1029 __ Bind(&comparison_failed);
1030 __ Clrex();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001031 __ Bind(&exit_loop);
1032
1033 if (kPoisonHeapReferences) {
1034 arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
1035 if (value.Is(expected)) {
1036 // Do not unpoison `value`, as it is the same register as
1037 // `expected`, which has just been unpoisoned.
1038 } else {
1039 arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
1040 }
1041 }
1042
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001043 __ B(GetExitLabel());
1044 }
1045
1046 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001047 // The register containing the object holding the marked object reference field.
1048 const Register obj_;
Roland Levillain54f869e2017-03-06 13:54:11 +00001049 // The offset, index and scale factor to access the reference in `obj_`.
1050 uint32_t offset_;
1051 Location index_;
1052 size_t scale_factor_;
1053 // Is a null check required?
1054 bool needs_null_check_;
1055 // Should this reference load use Load-Acquire semantics?
1056 bool use_load_acquire_;
1057 // A temporary register used to hold the lock word of `obj_`; and
1058 // also to hold the original reference value, when the reference is
1059 // marked.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001060 const Register temp_;
1061
Roland Levillain54f869e2017-03-06 13:54:11 +00001062 DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001063};
1064
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001065// Slow path generating a read barrier for a heap reference.
1066class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
1067 public:
1068 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
1069 Location out,
1070 Location ref,
1071 Location obj,
1072 uint32_t offset,
1073 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +00001074 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001075 out_(out),
1076 ref_(ref),
1077 obj_(obj),
1078 offset_(offset),
1079 index_(index) {
1080 DCHECK(kEmitCompilerReadBarrier);
1081 // If `obj` is equal to `out` or `ref`, it means the initial object
1082 // has been overwritten by (or after) the heap object reference load
1083 // to be instrumented, e.g.:
1084 //
1085 // __ Ldr(out, HeapOperand(out, class_offset);
Roland Levillain44015862016-01-22 11:47:17 +00001086 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001087 //
1088 // In that case, we have lost the information about the original
1089 // object, and the emitted read barrier cannot work properly.
1090 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
1091 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
1092 }
1093
1094 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1095 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1096 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001097 DataType::Type type = DataType::Type::kReference;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001098 DCHECK(locations->CanCall());
1099 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain3d312422016-06-23 13:53:42 +01001100 DCHECK(instruction_->IsInstanceFieldGet() ||
1101 instruction_->IsStaticFieldGet() ||
1102 instruction_->IsArrayGet() ||
1103 instruction_->IsInstanceOf() ||
1104 instruction_->IsCheckCast() ||
Andreas Gamped9911ee2017-03-27 13:27:24 -07001105 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain44015862016-01-22 11:47:17 +00001106 << "Unexpected instruction in read barrier for heap reference slow path: "
1107 << instruction_->DebugName();
Roland Levillain19c54192016-11-04 13:44:09 +00001108 // The read barrier instrumentation of object ArrayGet
1109 // instructions does not support the HIntermediateAddress
1110 // instruction.
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001111 DCHECK(!(instruction_->IsArrayGet() &&
Artem Serov328429f2016-07-06 16:23:04 +01001112 instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001113
1114 __ Bind(GetEntryLabel());
1115
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001116 SaveLiveRegisters(codegen, locations);
1117
1118 // We may have to change the index's value, but as `index_` is a
1119 // constant member (like other "inputs" of this slow path),
1120 // we introduce a copy of it, `index`.
1121 Location index = index_;
1122 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +01001123 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001124 if (instruction_->IsArrayGet()) {
1125 // Compute the actual memory offset and store it in `index`.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001126 Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001127 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
1128 if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
1129 // We are about to change the value of `index_reg` (see the
1130 // calls to vixl::MacroAssembler::Lsl and
1131 // vixl::MacroAssembler::Mov below), but it has
1132 // not been saved by the previous call to
1133 // art::SlowPathCode::SaveLiveRegisters, as it is a
1134 // callee-save register --
1135 // art::SlowPathCode::SaveLiveRegisters does not consider
1136 // callee-save registers, as it has been designed with the
1137 // assumption that callee-save registers are supposed to be
1138 // handled by the called function. So, as a callee-save
1139 // register, `index_reg` _would_ eventually be saved onto
1140 // the stack, but it would be too late: we would have
1141 // changed its value earlier. Therefore, we manually save
1142 // it here into another freely available register,
1143 // `free_reg`, chosen of course among the caller-save
1144 // registers (as a callee-save `free_reg` register would
1145 // exhibit the same problem).
1146 //
1147 // Note we could have requested a temporary register from
1148 // the register allocator instead; but we prefer not to, as
1149 // this is a slow path, and we know we can find a
1150 // caller-save register that is available.
1151 Register free_reg = FindAvailableCallerSaveRegister(codegen);
1152 __ Mov(free_reg.W(), index_reg);
1153 index_reg = free_reg;
1154 index = LocationFrom(index_reg);
1155 } else {
1156 // The initial register stored in `index_` has already been
1157 // saved in the call to art::SlowPathCode::SaveLiveRegisters
1158 // (as it is not a callee-save register), so we can freely
1159 // use it.
1160 }
1161 // Shifting the index value contained in `index_reg` by the scale
1162 // factor (2) cannot overflow in practice, as the runtime is
1163 // unable to allocate object arrays with a size larger than
1164 // 2^26 - 1 (that is, 2^28 - 4 bytes).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001165 __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001166 static_assert(
1167 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
1168 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
1169 __ Add(index_reg, index_reg, Operand(offset_));
1170 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001171 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
1172 // intrinsics, `index_` is not shifted by a scale factor of 2
1173 // (as in the case of ArrayGet), as it is actually an offset
1174 // to an object field within an object.
1175 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001176 DCHECK(instruction_->GetLocations()->Intrinsified());
1177 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
1178 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
1179 << instruction_->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001180 DCHECK_EQ(offset_, 0u);
Roland Levillaina7426c62016-08-03 15:02:10 +01001181 DCHECK(index_.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001182 }
1183 }
1184
1185 // We're moving two or three locations to locations that could
1186 // overlap, so we need a parallel move resolver.
1187 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +01001188 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001189 parallel_move.AddMove(ref_,
1190 LocationFrom(calling_convention.GetRegisterAt(0)),
1191 type,
1192 nullptr);
1193 parallel_move.AddMove(obj_,
1194 LocationFrom(calling_convention.GetRegisterAt(1)),
1195 type,
1196 nullptr);
1197 if (index.IsValid()) {
1198 parallel_move.AddMove(index,
1199 LocationFrom(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001200 DataType::Type::kInt32,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001201 nullptr);
1202 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1203 } else {
1204 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1205 arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
1206 }
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001207 arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001208 instruction_,
1209 instruction_->GetDexPc(),
1210 this);
1211 CheckEntrypointTypes<
1212 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
1213 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1214
1215 RestoreLiveRegisters(codegen, locations);
1216
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001217 __ B(GetExitLabel());
1218 }
1219
1220 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }
1221
1222 private:
1223 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001224 size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
1225 size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001226 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
1227 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
1228 return Register(VIXLRegCodeFromART(i), kXRegSize);
1229 }
1230 }
1231 // We shall never fail to find a free caller-save register, as
1232 // there are more than two core caller-save registers on ARM64
1233 // (meaning it is possible to find one which is different from
1234 // `ref` and `obj`).
1235 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
1236 LOG(FATAL) << "Could not find a free register";
1237 UNREACHABLE();
1238 }
1239
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001240 const Location out_;
1241 const Location ref_;
1242 const Location obj_;
1243 const uint32_t offset_;
1244 // An additional location containing an index to an array.
1245 // Only used for HArrayGet and the UnsafeGetObject &
1246 // UnsafeGetObjectVolatile intrinsics.
1247 const Location index_;
1248
1249 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
1250};
1251
1252// Slow path generating a read barrier for a GC root.
1253class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
1254 public:
1255 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +00001256 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +00001257 DCHECK(kEmitCompilerReadBarrier);
1258 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001259
1260 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1261 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001262 DataType::Type type = DataType::Type::kReference;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001263 DCHECK(locations->CanCall());
1264 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +00001265 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1266 << "Unexpected instruction in read barrier for GC root slow path: "
1267 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001268
1269 __ Bind(GetEntryLabel());
1270 SaveLiveRegisters(codegen, locations);
1271
1272 InvokeRuntimeCallingConvention calling_convention;
1273 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1274 // The argument of the ReadBarrierForRootSlow is not a managed
1275 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
1276 // thus we need a 64-bit move here, and we cannot use
1277 //
1278 // arm64_codegen->MoveLocation(
1279 // LocationFrom(calling_convention.GetRegisterAt(0)),
1280 // root_,
1281 // type);
1282 //
1283 // which would emit a 32-bit move, as `type` is a (32-bit wide)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001284 // reference type (`DataType::Type::kReference`).
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001285 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001286 arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001287 instruction_,
1288 instruction_->GetDexPc(),
1289 this);
1290 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1291 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1292
1293 RestoreLiveRegisters(codegen, locations);
1294 __ B(GetExitLabel());
1295 }
1296
1297 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
1298
1299 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001300 const Location out_;
1301 const Location root_;
1302
1303 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
1304};
1305
Alexandre Rames5319def2014-10-23 10:03:10 +01001306#undef __
1307
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001308Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001309 Location next_location;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001310 if (type == DataType::Type::kVoid) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001311 LOG(FATAL) << "Unreachable type " << type;
1312 }
1313
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001314 if (DataType::IsFloatingPointType(type) &&
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001315 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
1316 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001317 } else if (!DataType::IsFloatingPointType(type) &&
Alexandre Rames542361f2015-01-29 16:57:31 +00001318 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001319 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1320 } else {
1321 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001322 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1323 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001324 }
1325
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001326 // Space on the stack is reserved for all arguments.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001327 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +01001328 return next_location;
1329}
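// For illustration only (a sketch assuming the managed calling convention uses
// x1-x7 for core arguments and d0-d7 for floating-point arguments, with x0
// carrying the ArtMethod*): a signature (int, double, long) would be assigned
// w1, d0 and x2 respectively, while an argument that does not fit in the
// remaining registers is passed in the stack slot at
// GetStackOffsetOf(stack_index_). Stack space is reserved for every argument,
// whether or not it is actually passed on the stack.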
1330
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001331Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001332 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001333}
1334
Serban Constantinescu579885a2015-02-22 20:51:33 +00001335CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001336 const CompilerOptions& compiler_options,
1337 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001338 : CodeGenerator(graph,
1339 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001340 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001341 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001342 callee_saved_core_registers.GetList(),
1343 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001344 compiler_options,
1345 stats),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001346 block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1347 jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001348 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001349 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001350 move_resolver_(graph->GetAllocator(), this),
1351 assembler_(graph->GetAllocator()),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001352 uint32_literals_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001353 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001354 uint64_literals_(std::less<uint64_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001355 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001356 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001357 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001358 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001359 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001360 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001361 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko6fd16062018-06-26 11:02:04 +01001362 boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001363 baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001364 jit_string_patches_(StringReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001365 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001366 jit_class_patches_(TypeReferenceValueComparator(),
Vladimir Marko966b46f2018-08-03 10:20:19 +00001367 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1368 jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
1369 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001370 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001371 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001372}
Alexandre Rames5319def2014-10-23 10:03:10 +01001373
Alexandre Rames67555f72014-11-18 10:55:16 +00001374#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001375
Zheng Xu3927c8b2015-11-18 17:46:25 +08001376void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001377 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001378 jump_table->EmitTable(this);
1379 }
1380}
1381
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001382void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001383 EmitJumpTables();
Vladimir Marko966b46f2018-08-03 10:20:19 +00001384
1385 // Emit JIT baker read barrier slow paths.
1386 DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
1387 for (auto& entry : jit_baker_read_barrier_slow_paths_) {
1388 uint32_t encoded_data = entry.first;
1389 vixl::aarch64::Label* slow_path_entry = &entry.second.label;
1390 __ Bind(slow_path_entry);
1391 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name */ nullptr);
1392 }
1393
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001394 // Ensure we emit the literal pool.
1395 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001396
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001397 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +00001398
1399 // Verify Baker read barrier linker patches.
1400 if (kIsDebugBuild) {
1401 ArrayRef<const uint8_t> code = allocator->GetMemory();
1402 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
1403 DCHECK(info.label.IsBound());
1404 uint32_t literal_offset = info.label.GetLocation();
1405 DCHECK_ALIGNED(literal_offset, 4u);
1406
1407 auto GetInsn = [&code](uint32_t offset) {
1408 DCHECK_ALIGNED(offset, 4u);
1409 return
1410 (static_cast<uint32_t>(code[offset + 0]) << 0) +
1411 (static_cast<uint32_t>(code[offset + 1]) << 8) +
1412 (static_cast<uint32_t>(code[offset + 2]) << 16) +
1413 (static_cast<uint32_t>(code[offset + 3]) << 24);
1414 };
1415
1416 const uint32_t encoded_data = info.custom_data;
1417 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
1418 // Check that the next instruction matches the expected LDR.
1419 switch (kind) {
Vladimir Marko0ecac682018-08-07 10:40:38 +01001420 case BakerReadBarrierKind::kField:
1421 case BakerReadBarrierKind::kAcquire: {
Vladimir Markoca1e0382018-04-11 09:58:41 +00001422 DCHECK_GE(code.size() - literal_offset, 8u);
1423 uint32_t next_insn = GetInsn(literal_offset + 4u);
Vladimir Markoca1e0382018-04-11 09:58:41 +00001424 CheckValidReg(next_insn & 0x1fu); // Check destination register.
1425 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Marko0ecac682018-08-07 10:40:38 +01001426 if (kind == BakerReadBarrierKind::kField) {
1427 // LDR (immediate) with correct base_reg.
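          // (The mask keeps the size/opcode bits and the Rn field, so only the
          // base register is verified; the destination register and the
          // immediate offset are intentionally ignored.)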
1428 CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
1429 } else {
1430 DCHECK(kind == BakerReadBarrierKind::kAcquire);
1431 // LDAR with correct base_reg.
1432 CHECK_EQ(next_insn & 0xffffffe0u, 0x88dffc00u | (base_reg << 5));
1433 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00001434 break;
1435 }
1436 case BakerReadBarrierKind::kArray: {
1437 DCHECK_GE(code.size() - literal_offset, 8u);
1438 uint32_t next_insn = GetInsn(literal_offset + 4u);
1439 // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
1440 // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
1441 CheckValidReg(next_insn & 0x1fu); // Check destination register.
1442 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1443 CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
1444 CheckValidReg((next_insn >> 16) & 0x1f); // Check index register
1445 break;
1446 }
1447 case BakerReadBarrierKind::kGcRoot: {
1448 DCHECK_GE(literal_offset, 4u);
1449 uint32_t prev_insn = GetInsn(literal_offset - 4u);
1450 // LDR (immediate) with correct root_reg.
1451 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1452 CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg);
1453 break;
1454 }
1455 default:
1456 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
1457 UNREACHABLE();
1458 }
1459 }
1460 }
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001461}
1462
Zheng Xuad4450e2015-04-17 18:48:56 +08001463void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1464 // Note: There are 6 kinds of moves:
1465 // 1. constant -> GPR/FPR (non-cycle)
1466 // 2. constant -> stack (non-cycle)
1467 // 3. GPR/FPR -> GPR/FPR
1468 // 4. GPR/FPR -> stack
1469 // 5. stack -> GPR/FPR
1470 // 6. stack -> stack (non-cycle)
1471 // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
1472 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
1473 // cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to resolve the
1474 // dependency.
1475 vixl_temps_.Open(GetVIXLAssembler());
1476}
1477
1478void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1479 vixl_temps_.Close();
1480}
1481
1482Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
Artem Serovd4bccf12017-04-03 18:47:32 +01001483 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
1484 || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
1485 || kind == Location::kSIMDStackSlot);
1486 kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
1487 ? Location::kFpuRegister
1488 : Location::kRegister;
Zheng Xuad4450e2015-04-17 18:48:56 +08001489 Location scratch = GetScratchLocation(kind);
1490 if (!scratch.Equals(Location::NoLocation())) {
1491 return scratch;
1492 }
1493 // Allocate from VIXL temp registers.
1494 if (kind == Location::kRegister) {
1495 scratch = LocationFrom(vixl_temps_.AcquireX());
1496 } else {
Roland Levillain952b2352017-05-03 19:49:14 +01001497 DCHECK_EQ(kind, Location::kFpuRegister);
Artem Serovd4bccf12017-04-03 18:47:32 +01001498 scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
1499 ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
1500 : vixl_temps_.AcquireD());
Zheng Xuad4450e2015-04-17 18:48:56 +08001501 }
1502 AddScratchLocation(scratch);
1503 return scratch;
1504}
1505
1506void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1507 if (loc.IsRegister()) {
1508 vixl_temps_.Release(XRegisterFrom(loc));
1509 } else {
1510 DCHECK(loc.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001511 vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
Zheng Xuad4450e2015-04-17 18:48:56 +08001512 }
1513 RemoveScratchLocation(loc);
1514}
1515
Alexandre Rames3e69f162014-12-10 10:36:50 +00001516void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001517 MoveOperands* move = moves_[index];
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001518 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001519}
1520
Alexandre Rames5319def2014-10-23 10:03:10 +01001521void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001522 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001523 __ Bind(&frame_entry_label_);
1524
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001525 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
1526 UseScratchRegisterScope temps(masm);
1527 Register temp = temps.AcquireX();
1528 __ Ldrh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1529 __ Add(temp, temp, 1);
1530 __ Strh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1531 }
1532
Vladimir Marko33bff252017-11-01 14:35:42 +00001533 bool do_overflow_check =
1534 FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001535 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001536 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001537 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001538 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Vladimir Marko33bff252017-11-01 14:35:42 +00001539 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001540 {
1541 // Ensure that between load and RecordPcInfo there are no pools emitted.
1542 ExactAssemblyScope eas(GetVIXLAssembler(),
1543 kInstructionSize,
1544 CodeBufferCheckScope::kExactSize);
1545 __ ldr(wzr, MemOperand(temp, 0));
1546 RecordPcInfo(nullptr, 0);
1547 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001548 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001549
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001550 if (!HasEmptyFrame()) {
1551 int frame_size = GetFrameSize();
1552 // Stack layout:
1553 // sp[frame_size - 8] : lr.
1554 // ... : other preserved core registers.
1555 // ... : other preserved fp registers.
1556 // ... : reserved frame space.
1557 // sp[0] : current method.
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001558
1559 // Save the current method if we need it. Note that we do not
1560 // do this in HCurrentMethod, as the instruction might have been removed
1561 // in the SSA graph.
1562 if (RequiresCurrentMethod()) {
1563 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001564 } else {
1565 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001566 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001567 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001568 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1569 frame_size - GetCoreSpillSize());
1570 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1571 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001572
1573 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1574 // Initialize should_deoptimize flag to 0.
1575 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1576 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1577 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001578 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01001579
1580 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01001581}
1582
1583void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001584 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001585 if (!HasEmptyFrame()) {
1586 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001587 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1588 frame_size - FrameEntrySpillSize());
1589 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1590 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001591 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001592 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001593 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001594 __ Ret();
1595 GetAssembler()->cfi().RestoreState();
1596 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001597}
1598
Scott Wakeling97c72b72016-06-24 16:19:36 +01001599CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001600 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001601 return CPURegList(CPURegister::kRegister, kXRegSize,
1602 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001603}
1604
Scott Wakeling97c72b72016-06-24 16:19:36 +01001605CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001606 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1607 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001608 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1609 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001610}
1611
Alexandre Rames5319def2014-10-23 10:03:10 +01001612void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1613 __ Bind(GetLabelOf(block));
1614}
1615
Calin Juravle175dc732015-08-25 15:42:32 +01001616void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1617 DCHECK(location.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001618 __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
Calin Juravle175dc732015-08-25 15:42:32 +01001619}
1620
Calin Juravlee460d1d2015-09-29 04:52:17 +01001621void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1622 if (location.IsRegister()) {
1623 locations->AddTemp(location);
1624 } else {
1625 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1626 }
1627}
1628
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001629void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001630 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001631 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001632 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001633 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001634 if (value_can_be_null) {
1635 __ Cbz(value, &done);
1636 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001637 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001638 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
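  // Note (added for clarity): the byte stored below is the low byte of the
  // card table base address itself; the runtime biases the card table so that
  // this byte equals the dirty-card value, avoiding a separate constant load.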
Serban Constantinescu02164b32014-11-13 14:05:07 +00001639 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001640 if (value_can_be_null) {
1641 __ Bind(&done);
1642 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001643}
1644
David Brazdil58282f42016-01-14 12:45:10 +00001645void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001646 // Blocked core registers:
1647 // lr : Runtime reserved.
1648 // tr : Runtime reserved.
Roland Levillain97c46462017-05-11 14:04:03 +01001649 // mr : Runtime reserved.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001650 // ip1 : VIXL core temp.
1651 // ip0 : VIXL core temp.
1652 //
1653 // Blocked fp registers:
1654 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001655 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1656 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001657 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001658 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001659 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001660
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001661 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001662 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001663 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001664 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001665
David Brazdil58282f42016-01-14 12:45:10 +00001666 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001667 // Stubs do not save callee-save floating point registers. If the graph
1668 // is debuggable, we need to deal with these registers differently. For
1669 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001670 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1671 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001672 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001673 }
1674 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001675}
1676
Alexandre Rames3e69f162014-12-10 10:36:50 +00001677size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1678 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1679 __ Str(reg, MemOperand(sp, stack_index));
1680 return kArm64WordSize;
1681}
1682
1683size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1684 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1685 __ Ldr(reg, MemOperand(sp, stack_index));
1686 return kArm64WordSize;
1687}
1688
1689size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1690 FPRegister reg = FPRegister(reg_id, kDRegSize);
1691 __ Str(reg, MemOperand(sp, stack_index));
1692 return kArm64WordSize;
1693}
1694
1695size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1696 FPRegister reg = FPRegister(reg_id, kDRegSize);
1697 __ Ldr(reg, MemOperand(sp, stack_index));
1698 return kArm64WordSize;
1699}
1700
Alexandre Rames5319def2014-10-23 10:03:10 +01001701void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001702 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001703}
1704
1705void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001706 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001707}
1708
Vladimir Markoa0431112018-06-25 09:32:54 +01001709const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
1710 return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
1711}
1712
Alexandre Rames67555f72014-11-18 10:55:16 +00001713void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001714 if (constant->IsIntConstant()) {
1715 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1716 } else if (constant->IsLongConstant()) {
1717 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1718 } else if (constant->IsNullConstant()) {
1719 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001720 } else if (constant->IsFloatConstant()) {
1721 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1722 } else {
1723 DCHECK(constant->IsDoubleConstant());
1724 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1725 }
1726}
1727
Alexandre Rames3e69f162014-12-10 10:36:50 +00001728
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001729static bool CoherentConstantAndType(Location constant, DataType::Type type) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001730 DCHECK(constant.IsConstant());
1731 HConstant* cst = constant.GetConstant();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001732 return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001733 // Null is mapped to a core W register, which we associate with kInt32.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001734 (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
1735 (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
1736 (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
1737 (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001738}
1739
Roland Levillain952b2352017-05-03 19:49:14 +01001740// Allocate a scratch register from the VIXL pool, querying first
1741// the floating-point register pool, and then the core register
1742// pool. This is essentially a reimplementation of
Roland Levillain558dea12017-01-27 19:40:44 +00001743// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1744// using a different allocation strategy.
1745static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1746 vixl::aarch64::UseScratchRegisterScope* temps,
1747 int size_in_bits) {
1748 return masm->GetScratchFPRegisterList()->IsEmpty()
1749 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1750 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1751}
1752
Calin Juravlee460d1d2015-09-29 04:52:17 +01001753void CodeGeneratorARM64::MoveLocation(Location destination,
1754 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001755 DataType::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001756 if (source.Equals(destination)) {
1757 return;
1758 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001759
1760 // A valid move can always be inferred from the destination and source
1761 // locations. When moving from and to a register, the argument type can be
1762 // used to generate 32bit instead of 64bit moves. In debug mode we also
1763 // check the coherency of the locations and the type.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001764 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001765
1766 if (destination.IsRegister() || destination.IsFpuRegister()) {
1767 if (unspecified_type) {
1768 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1769 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001770 (src_cst != nullptr && (src_cst->IsIntConstant()
1771 || src_cst->IsFloatConstant()
1772 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001773 // For stack slots and 32bit constants, a 32bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001774 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexandre Rames67555f72014-11-18 10:55:16 +00001775 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001776 // If the source is a double stack slot or a 64bit constant, a 64bit
1777 // type is appropriate. Else the source is a register, and since the
1778 // type has not been specified, we choose a 64bit type to force a 64bit
1779 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001780 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexandre Rames67555f72014-11-18 10:55:16 +00001781 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001782 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001783 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1784 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001785 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001786 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1787 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1788 __ Ldr(dst, StackOperandFrom(source));
Artem Serovd4bccf12017-04-03 18:47:32 +01001789 } else if (source.IsSIMDStackSlot()) {
1790 __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001791 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001792 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001793 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001794 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001795 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001796 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001797 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001798 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001799 DataType::Type source_type = DataType::Is64BitType(dst_type)
1800 ? DataType::Type::kInt64
1801 : DataType::Type::kInt32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001802 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1803 }
1804 } else {
1805 DCHECK(source.IsFpuRegister());
1806 if (destination.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001807 DataType::Type source_type = DataType::Is64BitType(dst_type)
1808 ? DataType::Type::kFloat64
1809 : DataType::Type::kFloat32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001810 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1811 } else {
1812 DCHECK(destination.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001813 if (GetGraph()->HasSIMD()) {
1814 __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
1815 } else {
1816 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
1817 }
1818 }
1819 }
1820 } else if (destination.IsSIMDStackSlot()) {
1821 if (source.IsFpuRegister()) {
1822 __ Str(QRegisterFrom(source), StackOperandFrom(destination));
1823 } else {
1824 DCHECK(source.IsSIMDStackSlot());
1825 UseScratchRegisterScope temps(GetVIXLAssembler());
1826 if (GetVIXLAssembler()->GetScratchFPRegisterList()->IsEmpty()) {
1827 Register temp = temps.AcquireX();
1828 __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
1829 __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
1830 __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
1831 __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
1832 } else {
1833 FPRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
1834 __ Ldr(temp, StackOperandFrom(source));
1835 __ Str(temp, StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001836 }
1837 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001838 } else { // The destination is not a register. It must be a stack slot.
1839 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1840 if (source.IsRegister() || source.IsFpuRegister()) {
1841 if (unspecified_type) {
1842 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001843 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001844 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001845 dst_type =
1846 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001847 }
1848 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001849 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1850 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001851 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001852 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001853 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1854 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001855 UseScratchRegisterScope temps(GetVIXLAssembler());
1856 HConstant* src_cst = source.GetConstant();
1857 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001858 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001859 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1860 ? Register(xzr)
1861 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001862 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001863 if (src_cst->IsIntConstant()) {
1864 temp = temps.AcquireW();
1865 } else if (src_cst->IsLongConstant()) {
1866 temp = temps.AcquireX();
1867 } else if (src_cst->IsFloatConstant()) {
1868 temp = temps.AcquireS();
1869 } else {
1870 DCHECK(src_cst->IsDoubleConstant());
1871 temp = temps.AcquireD();
1872 }
1873 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001874 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001875 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001876 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001877 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001878 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001879 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001880 // Use any scratch register (a core or a floating-point one)
1881 // from VIXL scratch register pools as a temporary.
1882 //
1883 // We used to only use the FP scratch register pool, but in some
1884 // rare cases the only register from this pool (D31) would
1885 // already be used (e.g. within a ParallelMove instruction, when
1886 // a move is blocked by a another move requiring a scratch FP
1887 // register, which would reserve D31). To prevent this issue, we
1888 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001889 //
1890 // Also, we start by asking for an FP scratch register, as the
Roland Levillain952b2352017-05-03 19:49:14 +01001891 // demand for scratch core registers is higher. This is why we
Roland Levillain558dea12017-01-27 19:40:44 +00001892 // use AcquireFPOrCoreCPURegisterOfSize instead of
1893 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1894 // allocates core scratch registers first.
1895 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1896 GetVIXLAssembler(),
1897 &temps,
1898 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001899 __ Ldr(temp, StackOperandFrom(source));
1900 __ Str(temp, StackOperandFrom(destination));
1901 }
1902 }
1903}
1904
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001905void CodeGeneratorARM64::Load(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001906 CPURegister dst,
1907 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001908 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001909 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001910 case DataType::Type::kUint8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001911 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001912 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001913 case DataType::Type::kInt8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001914 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001915 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001916 case DataType::Type::kUint16:
Alexandre Rames67555f72014-11-18 10:55:16 +00001917 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001918 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001919 case DataType::Type::kInt16:
1920 __ Ldrsh(Register(dst), src);
1921 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001922 case DataType::Type::kInt32:
1923 case DataType::Type::kReference:
1924 case DataType::Type::kInt64:
1925 case DataType::Type::kFloat32:
1926 case DataType::Type::kFloat64:
1927 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001928 __ Ldr(dst, src);
1929 break;
Aart Bik66c158e2018-01-31 12:55:04 -08001930 case DataType::Type::kUint32:
1931 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001932 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001933 LOG(FATAL) << "Unreachable type " << type;
1934 }
1935}
1936
Calin Juravle77520bc2015-01-12 18:45:46 +00001937void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001938 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001939 const MemOperand& src,
1940 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001941 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001942 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001943 Register temp_base = temps.AcquireX();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001944 DataType::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001945
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001946 DCHECK(!src.IsPreIndex());
1947 DCHECK(!src.IsPostIndex());
1948
1949 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001950 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001951 {
1952 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1953 MemOperand base = MemOperand(temp_base);
1954 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001955 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001956 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001957 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00001958 {
1959 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1960 __ ldarb(Register(dst), base);
1961 if (needs_null_check) {
1962 MaybeRecordImplicitNullCheck(instruction);
1963 }
1964 }
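        // Note (added for clarity): ARMv8.0 has no sign-extending load-acquire,
        // so Int8 (and Int16 below) is loaded zero-extended by the acquire load
        // and then sign-extended explicitly with SBFX.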
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001965 if (type == DataType::Type::kInt8) {
1966 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
Artem Serov914d7a82017-02-07 14:33:49 +00001967 }
1968 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001969 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001970 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00001971 {
1972 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1973 __ ldarh(Register(dst), base);
1974 if (needs_null_check) {
1975 MaybeRecordImplicitNullCheck(instruction);
1976 }
1977 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001978 if (type == DataType::Type::kInt16) {
1979 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
1980 }
Artem Serov914d7a82017-02-07 14:33:49 +00001981 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001982 case DataType::Type::kInt32:
1983 case DataType::Type::kReference:
1984 case DataType::Type::kInt64:
1985 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001986 {
1987 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1988 __ ldar(Register(dst), base);
1989 if (needs_null_check) {
1990 MaybeRecordImplicitNullCheck(instruction);
1991 }
1992 }
1993 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001994 case DataType::Type::kFloat32:
1995 case DataType::Type::kFloat64: {
Artem Serov914d7a82017-02-07 14:33:49 +00001996 DCHECK(dst.IsFPRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001997 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001998
Artem Serov914d7a82017-02-07 14:33:49 +00001999 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
2000 {
2001 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2002 __ ldar(temp, base);
2003 if (needs_null_check) {
2004 MaybeRecordImplicitNullCheck(instruction);
2005 }
2006 }
2007 __ Fmov(FPRegister(dst), temp);
2008 break;
Roland Levillain44015862016-01-22 11:47:17 +00002009 }
Aart Bik66c158e2018-01-31 12:55:04 -08002010 case DataType::Type::kUint32:
2011 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002012 case DataType::Type::kVoid:
Artem Serov914d7a82017-02-07 14:33:49 +00002013 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002014 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002015 }
2016}
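// Illustrative sketch (not emitted verbatim): for a volatile Float32 load the code
// above produces roughly
//   add  <temp_base>, <base>, #<offset>
//   ldar <wtemp>, [<temp_base>]
//   fmov <sdst>, <wtemp>
// with the concrete registers picked by UseScratchRegisterScope; integer loads use
// ldarb/ldarh/ldar directly and, for kInt8/kInt16, sign-extend afterwards with Sbfx.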

void CodeGeneratorARM64::Store(DataType::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      __ Strb(Register(src), dst);
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      __ Strh(Register(src), dst);
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      __ Str(src, dst);
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
                                      DataType::Type type,
                                      CPURegister src,
                                      const MemOperand& dst,
                                      bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.GetBaseRegister(), op);
  MemOperand base = MemOperand(temp_base);
  // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrb(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrh(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      Register temp_src;
      if (src.IsZero()) {
        // The zero register is used to avoid synthesizing zero constants.
        temp_src = Register(src);
      } else {
        DCHECK(src.IsFPRegister());
        temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        __ Fmov(temp_src, FPRegister(src));
      }
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(temp_src, base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    }
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
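// Illustrative sketch: a volatile Int32 field store becomes roughly
//   add  <temp_base>, <base>, #<offset>
//   stlr <wsrc>, [<temp_base>]
// FP values are first moved to a core register with Fmov (or the zero register is
// used directly for a zero bit pattern), since stlr only accepts core registers.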

void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                       HInstruction* instruction,
                                       uint32_t dex_pc,
                                       SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);

  __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
  {
    // Ensure the pc position is recorded immediately after the `blr` instruction.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
    __ blr(lr);
    if (EntrypointRequiresStackMap(entrypoint)) {
      RecordPcInfo(instruction, dex_pc, slow_path);
    }
  }
}
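// In effect this is a two-instruction runtime call through the thread register `tr`:
//   ldr lr, [tr, #<entrypoint offset>]
//   blr lr
// followed, when the entrypoint needs a stack map, by RecordPcInfo for the pc
// immediately after the blr.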

void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                             HInstruction* instruction,
                                                             SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  __ Ldr(lr, MemOperand(tr, entry_point_offset));
  __ Blr(lr);
}

void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                                                     Register class_reg) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp = temps.AcquireW();
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp, class_reg, status_byte_offset);
  __ Ldarb(temp, HeapOperand(temp));
  __ Cmp(temp, shifted_initialized_value);
  __ B(lo, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
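// The check above reads the byte of the 32-bit class status word that holds the
// ClassStatus (the bits above the SubtypeCheckBits) with an acquire load (Ldarb),
// compares it against ClassStatus::kInitialized shifted into that byte, and takes
// the slow path when the status is lower, i.e. the class is not yet initialized.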

void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
    HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Load only the bitstring part of the status word.
    __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
    // Extract the bitstring bits.
    __ Ubfx(temp, temp, 0, mask_bits);
  }
  // Compare the bitstring bits to `path_to_root`.
  __ Cmp(temp, path_to_root);
}
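// Worked example (illustrative): with a 16-bit bitstring mask (mask == 0xffff, so
// mask_bits == 16) a single 16-bit load of the status word suffices; otherwise the
// bitstring is extracted with Ubfx. The final Cmp only sets the flags; the caller is
// expected to branch on them (eq when the object's path-to-root equals `path_to_root`).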

void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
  BarrierType type = BarrierAll;

  switch (kind) {
    case MemBarrierKind::kAnyAny:
    case MemBarrierKind::kAnyStore: {
      type = BarrierAll;
      break;
    }
    case MemBarrierKind::kLoadAny: {
      type = BarrierReads;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      type = BarrierWrites;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ Dmb(InnerShareable, type);
}

void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                         HBasicBlock* successor) {
  SuspendCheckSlowPathARM64* slow_path =
      down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
  Register temp = temps.AcquireW();

  __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
  if (successor == nullptr) {
    __ Cbnz(temp, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Cbz(temp, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
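// The suspend check reads the 16-bit thread flags through the thread register and
// either jumps to the slow path when any flag is set (Cbnz, fall-through case with
// no successor) or, on a back edge, branches to `successor` only when no flag is set
// (Cbz) and otherwise falls into the slow path.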

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
  }
}

void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
                                           const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_field_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
    // We need a temporary register for the read barrier load in
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
    // only if the field is volatile or the offset is too big.
    if (field_info.IsVolatile() ||
        field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
      locations->AddTemp(FixedTempLocation());
    }
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps for an object field get when read barriers
    // are enabled: we do not want the load to overwrite the object's
    // location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
                                                   const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations = instruction->GetLocations();
  Location base_loc = locations->InAt(0);
  Location out = locations->Out();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type load_type = instruction->GetType();
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
      load_type == DataType::Type::kReference) {
    // Object FieldGet with Baker's read barrier case.
    // /* HeapReference<Object> */ out = *(base + offset)
    Register base = RegisterFrom(base_loc, DataType::Type::kReference);
    Location maybe_temp =
        (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
    // Note that potential implicit null checks are handled in this
    // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
    codegen_->GenerateFieldLoadWithBakerReadBarrier(
        instruction,
        out,
        base,
        offset,
        maybe_temp,
        /* needs_null_check */ true,
        field_info.IsVolatile());
  } else {
    // General case.
    if (field_info.IsVolatile()) {
      // Note that a potential implicit null check is handled in this
      // CodeGeneratorARM64::LoadAcquire call.
      // NB: LoadAcquire will record the pc info if needed.
      codegen_->LoadAcquire(
          instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
    } else {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Load(load_type, OutputCPURegister(instruction), field);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
    if (load_type == DataType::Type::kReference) {
      // If read barriers are enabled, emit read barriers other than
      // Baker's using a slow path (and also unpoison the loaded
      // reference, if heap poisoning is enabled).
      codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
    }
  }
}

void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
  } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
                                                   const FieldInfo& field_info,
                                                   bool value_can_be_null) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  Register obj = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
  CPURegister source = value;
  Offset offset = field_info.GetFieldOffset();
  DataType::Type field_type = field_info.GetFieldType();

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(GetVIXLAssembler());

    if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp, value.W());
      GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (field_info.IsVolatile()) {
      codegen_->StoreRelease(
          instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
    } else {
      // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Store(field_type, source, HeapOperand(obj, offset));
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }
  }

  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
  }
}
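// Note on ordering above: a reference value is poisoned into a scratch register first
// (when kPoisonHeapReferences is set), the store itself is either a StoreRelease
// (volatile field) or a plain Store with an implicit null check, and MarkGCCard runs
// only after the scratch scope closes so its temporaries do not collide with the store's.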

void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else if (instr->IsAnd()) {
        __ And(dst, lhs, rhs);
      } else if (instr->IsOr()) {
        __ Orr(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Sub(dst, lhs, rhs);
      } else if (instr->IsRor()) {
        if (rhs.IsImmediate()) {
          uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
          __ Ror(dst, lhs, shift);
        } else {
          // Ensure shift distance is in the same size register as the result. If
          // we are rotating a long and the shift comes in a w register originally,
          // we don't need to sxtw for use as an x since the shift distances are
          // all & reg_bits - 1.
          __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
        }
      } else if (instr->IsMin() || instr->IsMax()) {
        __ Cmp(lhs, rhs);
        __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
      } else {
        DCHECK(instr->IsXor());
        __ Eor(dst, lhs, rhs);
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FPRegister dst = OutputFPRegister(instr);
      FPRegister lhs = InputFPRegisterAt(instr, 0);
      FPRegister rhs = InputFPRegisterAt(instr, 1);
      if (instr->IsAdd()) {
        __ Fadd(dst, lhs, rhs);
      } else if (instr->IsSub()) {
        __ Fsub(dst, lhs, rhs);
      } else if (instr->IsMin()) {
        __ Fmin(dst, lhs, rhs);
      } else if (instr->IsMax()) {
        __ Fmax(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());

  DataType::Type type = instr->GetType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      Register dst = OutputRegister(instr);
      Register lhs = InputRegisterAt(instr, 0);
      Operand rhs = InputOperandAt(instr, 1);
      if (rhs.IsImmediate()) {
        uint32_t shift_value = rhs.GetImmediate() &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
        if (instr->IsShl()) {
          __ Lsl(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, shift_value);
        } else {
          __ Lsr(dst, lhs, shift_value);
        }
      } else {
        Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();

        if (instr->IsShl()) {
          __ Lsl(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Asr(dst, lhs, rhs_reg);
        } else {
          __ Lsr(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
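// Example (illustrative): for a constant-distance Int32 shift the distance is masked,
// so `x << 35` would be emitted as `lsl w<dst>, w<lhs>, #3`. This assumes
// kMaxIntShiftDistance is 31 and kMaxLongShiftDistance is 63 (defined elsewhere),
// matching Java shift semantics.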

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  locations->SetInAt(0, Location::RequiresRegister());
  // There is no immediate variant of negated bitwise instructions in AArch64.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Register rhs = InputRegisterAt(instr, 1);

  switch (instr->GetOpKind()) {
    case HInstruction::kAnd:
      __ Bic(dst, lhs, rhs);
      break;
    case HInstruction::kOr:
      __ Orn(dst, lhs, rhs);
      break;
    case HInstruction::kXor:
      __ Eon(dst, lhs, rhs);
      break;
    default:
      LOG(FATAL) << "Unreachable";
  }
}
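// Mapping used above (dst = lhs <op> ~rhs): kAnd -> bic, kOr -> orn, kXor -> eon.
// Both inputs must be in registers because AArch64 has no immediate forms of these
// negated bitwise instructions, as noted in the locations builder.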

void LocationsBuilderARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
         instruction->GetType() == DataType::Type::kInt64);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  if (instruction->GetInstrKind() == HInstruction::kNeg) {
    locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
    HDataProcWithShifterOp* instruction) {
  DataType::Type type = instruction->GetType();
  HInstruction::InstructionKind kind = instruction->GetInstrKind();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
  Register out = OutputRegister(instruction);
  Register left;
  if (kind != HInstruction::kNeg) {
    left = InputRegisterAt(instruction, 0);
  }
  // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
  // shifter operand operation, the IR generating `right_reg` (input to the type
  // conversion) can have a different type from the current instruction's type,
  // so we manually indicate the type.
  Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
  Operand right_operand(0);

  HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
  if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
    right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
  } else {
    right_operand = Operand(right_reg,
                            helpers::ShiftFromOpKind(op_kind),
                            instruction->GetShiftAmount());
  }

  // Logical binary operations do not support extension operations in the
  // operand. Note that VIXL would still manage if it was passed by generating
  // the extension as a separate instruction.
  // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
  DCHECK(!right_operand.IsExtendedRegister() ||
         (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
          kind != HInstruction::kNeg));
  switch (kind) {
    case HInstruction::kAdd:
      __ Add(out, left, right_operand);
      break;
    case HInstruction::kAnd:
      __ And(out, left, right_operand);
      break;
    case HInstruction::kNeg:
      DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
      __ Neg(out, right_operand);
      break;
    case HInstruction::kOr:
      __ Orr(out, left, right_operand);
      break;
    case HInstruction::kSub:
      __ Sub(out, left, right_operand);
      break;
    case HInstruction::kXor:
      __ Eor(out, left, right_operand);
      break;
    default:
      LOG(FATAL) << "Unexpected operation kind: " << kind;
      UNREACHABLE();
  }
}
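// Illustrative example: an HAdd whose right input was merged with a `<< 3` becomes a
// single `add x<out>, x<left>, x<right>, lsl #3`; when the merged operation was a type
// conversion, the operand instead carries an extend such as sxtw or uxth.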

void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
  __ Add(OutputRegister(instruction),
         InputRegisterAt(instruction, 0),
         Operand(InputOperandAt(instruction, 1)));
}

void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);

  HIntConstant* shift = instruction->GetShift()->AsIntConstant();

  locations->SetInAt(0, Location::RequiresRegister());
  // For byte case we don't need to shift the index variable so we can encode the data offset into
  // ADD instruction. For other cases we prefer the data_offset to be in register; that will hoist
  // data offset constant generation out of the loop and reduce the critical path length in the
  // loop.
  locations->SetInAt(1, shift->GetValue() == 0
                        ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
                        : Location::RequiresRegister());
  locations->SetInAt(2, Location::ConstantLocation(shift));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* instruction) {
  Register index_reg = InputRegisterAt(instruction, 0);
  uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
  uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();

  if (shift == 0) {
    __ Add(OutputRegister(instruction), index_reg, offset);
  } else {
    Register offset_reg = InputRegisterAt(instruction, 1);
    __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
  }
}

void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
  HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
  if (instr->GetOpKind() == HInstruction::kSub &&
      accumulator->IsConstant() &&
      accumulator->AsConstant()->IsArithmeticZero()) {
    // Don't allocate register for Mneg instruction.
  } else {
    locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
                       Location::RequiresRegister());
  }
  locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
  locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
  Register res = OutputRegister(instr);
  Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
  Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);

  // Avoid emitting code that could trigger Cortex A53's erratum 835769.
  // This fixup should be carried out for all multiply-accumulate instructions:
  // madd, msub, smaddl, smsubl, umaddl and umsubl.
  if (instr->GetType() == DataType::Type::kInt64 &&
      codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
    MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
    vixl::aarch64::Instruction* prev =
        masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
    if (prev->IsLoadOrStore()) {
      // Make sure we emit only exactly one nop.
      ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
      __ nop();
    }
  }

  if (instr->GetOpKind() == HInstruction::kAdd) {
    Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
    __ Madd(res, mul_left, mul_right, accumulator);
  } else {
    DCHECK(instr->GetOpKind() == HInstruction::kSub);
    HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
    if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
      __ Mneg(res, mul_left, mul_right);
    } else {
      Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
      __ Msub(res, mul_left, mul_right, accumulator);
    }
  }
}
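// The conditional nop above is the workaround for Cortex-A53 erratum 835769: as the
// comments note, a 64-bit multiply-accumulate (madd/msub and the widening forms)
// immediately following a load or store can misbehave on affected cores, so one nop
// is inserted between the two when the target CPU needs the fix.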

void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
    if (instruction->GetIndex()->IsConstant()) {
      // Array loads with constant index are treated as field loads.
      // We need a temporary register for the read barrier load in
      // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
      // only if the offset is too big.
      uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
      uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
      offset += index << DataType::SizeShift(DataType::Type::kReference);
      if (offset >= kReferenceLoadMinFarOffset) {
        locations->AddTemp(FixedTempLocation());
      }
    } else {
      // We need a non-scratch temporary for the array data pointer in
      // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier().
      locations->AddTemp(Location::RequiresRegister());
    }
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(
        Location::RequiresRegister(),
        object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  Register obj = InputRegisterAt(instruction, 0);
  LocationSummary* locations = instruction->GetLocations();
  Location index = locations->InAt(1);
  Location out = locations->Out();
  uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  // The read barrier instrumentation of object ArrayGet instructions
  // does not support the HIntermediateAddress instruction.
  DCHECK(!((type == DataType::Type::kReference) &&
           instruction->GetArray()->IsIntermediateAddress() &&
           kEmitCompilerReadBarrier));

  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Object ArrayGet with Baker's read barrier case.
    // Note that a potential implicit null check is handled in the
    // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
    DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
    if (index.IsConstant()) {
      // Array load with a constant index can be treated as a field load.
      offset += Int64FromLocation(index) << DataType::SizeShift(type);
      Location maybe_temp =
          (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj.W(),
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      Register temp = WRegisterFrom(locations->GetTemp(0));
      codegen_->GenerateArrayLoadWithBakerReadBarrier(
          out, obj.W(), offset, index, temp, /* needs_null_check */ false);
    }
  } else {
    // General case.
    MemOperand source = HeapOperand(obj);
    Register length;
    if (maybe_compressed_char_at) {
      uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
      length = temps.AcquireW();
      {
        // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);

        if (instruction->GetArray()->IsIntermediateAddress()) {
          DCHECK_LT(count_offset, offset);
          int64_t adjusted_offset =
              static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
          // Note that `adjusted_offset` is negative, so this will be a LDUR.
          __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
        } else {
          __ Ldr(length, HeapOperand(obj, count_offset));
        }
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }
    }
    if (index.IsConstant()) {
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + Int64FromLocation(index)));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
        __ Bind(&done);
      } else {
        offset += Int64FromLocation(index) << DataType::SizeShift(type);
        source = HeapOperand(obj, offset);
      }
    } else {
      Register temp = temps.AcquireSameSizeAs(obj);
      if (instruction->GetArray()->IsIntermediateAddress()) {
        // We do not need to compute the intermediate address from the array: the
        // input instruction has done it already. See the comment in
        // `TryExtractArrayAccessAddress()`.
        if (kIsDebugBuild) {
          HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
        }
        temp = obj;
      } else {
        __ Add(temp, obj, offset);
      }
      if (maybe_compressed_char_at) {
        vixl::aarch64::Label uncompressed_load, done;
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
        __ Tbnz(length.W(), 0, &uncompressed_load);
        __ Ldrb(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 0));
        __ B(&done);
        __ Bind(&uncompressed_load);
        __ Ldrh(Register(OutputCPURegister(instruction)),
                HeapOperand(temp, XRegisterFrom(index), LSL, 1));
        __ Bind(&done);
      } else {
        source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
      }
    }
    if (!maybe_compressed_char_at) {
      // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      codegen_->Load(type, OutputCPURegister(instruction), source);
      codegen_->MaybeRecordImplicitNullCheck(instruction);
    }

    if (type == DataType::Type::kReference) {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      Location obj_loc = locations->InAt(0);
      if (index.IsConstant()) {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
      } else {
        codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
      }
    }
  }
}
2914
Alexandre Rames5319def2014-10-23 10:03:10 +01002915void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002916 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002917 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002918 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002919}
2920
2921void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002922 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002923 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002924 {
2925 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2926 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2927 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2928 codegen_->MaybeRecordImplicitNullCheck(instruction);
2929 }
jessicahandojo05765752016-09-09 19:01:32 -07002930 // Mask out compression flag from String's array length.
2931 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002932 __ Lsr(out.W(), out.W(), 1u);
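    // For illustration: an uncompressed string of length 5 stores count_ == (5 << 1) | 1 == 11
    // and the shift above yields 5; a compressed string of the same length stores 10.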
jessicahandojo05765752016-09-09 19:01:32 -07002933 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002934}
2935
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002936void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002937 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002938
2939 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002940 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002941 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002942 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002943 LocationSummary::kCallOnSlowPath :
2944 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002945 locations->SetInAt(0, Location::RequiresRegister());
2946 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002947 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2948 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002949 } else if (DataType::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002950 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002951 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002952 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002953 }
2954}
2955
2956void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002957 DataType::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002958 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002959 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002960 bool needs_write_barrier =
2961 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002962
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002963 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002964 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002965 CPURegister source = value;
2966 Location index = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002967 size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002968 MemOperand destination = HeapOperand(array);
2969 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002970
2971 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002972 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002973 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002974 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002975 destination = HeapOperand(array, offset);
2976 } else {
2977 UseScratchRegisterScope temps(masm);
2978 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002979 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002980 // We do not need to compute the intermediate address from the array: the
2981 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002982 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002983 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002984 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002985 DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
2986 }
2987 temp = array;
2988 } else {
2989 __ Add(temp, array, offset);
2990 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002991 destination = HeapOperand(temp,
2992 XRegisterFrom(index),
2993 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002994 DataType::SizeShift(value_type));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002995 }
Artem Serov914d7a82017-02-07 14:33:49 +00002996 {
2997 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2998 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2999 codegen_->Store(value_type, value, destination);
3000 codegen_->MaybeRecordImplicitNullCheck(instruction);
3001 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003002 } else {
Artem Serov328429f2016-07-06 16:23:04 +01003003 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003004 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003005 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01003006 {
3007 // We use a block to end the scratch scope before the write barrier, thus
3008 // freeing the temporary registers so they can be used in `MarkGCCard`.
3009 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003010 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01003011 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003012 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003013 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01003014 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01003015 destination = HeapOperand(temp,
3016 XRegisterFrom(index),
3017 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003018 DataType::SizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01003019 }
3020
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003021 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3022 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3023 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3024
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003025 if (may_need_runtime_call_for_type_check) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01003026 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003027 codegen_->AddSlowPath(slow_path);
3028 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003029 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003030 __ Cbnz(Register(value), &non_zero);
3031 if (!index.IsConstant()) {
3032 __ Add(temp, array, offset);
3033 }
Artem Serov914d7a82017-02-07 14:33:49 +00003034 {
3035 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
3036 // emitted.
3037 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3038 __ Str(wzr, destination);
3039 codegen_->MaybeRecordImplicitNullCheck(instruction);
3040 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003041 __ B(&done);
3042 __ Bind(&non_zero);
3043 }
3044
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003045 // Note that when Baker read barriers are enabled, the type
3046 // checks are performed without read barriers. This is fine,
3047 // even in the case where a class object is in the from-space
3048 // after the flip, as a comparison involving such a type would
3049 // not produce a false positive; it may of course produce a
3050 // false negative, in which case we would take the ArraySet
3051 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01003052
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003053 Register temp2 = temps.AcquireSameSizeAs(array);
3054 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00003055 {
3056 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
3057 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3058 __ Ldr(temp, HeapOperand(array, class_offset));
3059 codegen_->MaybeRecordImplicitNullCheck(instruction);
3060 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003061 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01003062
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003063 // /* HeapReference<Class> */ temp = temp->component_type_
3064 __ Ldr(temp, HeapOperand(temp, component_offset));
3065 // /* HeapReference<Class> */ temp2 = value->klass_
3066 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
3067 // If heap poisoning is enabled, no need to unpoison `temp`
3068 // nor `temp2`, as we are comparing two poisoned references.
3069 __ Cmp(temp, temp2);
3070 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01003071
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003072 if (instruction->StaticTypeOfArrayIsObjectArray()) {
3073 vixl::aarch64::Label do_put;
3074 __ B(eq, &do_put);
3075 // If heap poisoning is enabled, the `temp` reference has
3076 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003077 GetAssembler()->MaybeUnpoisonHeapReference(temp);
3078
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003079 // /* HeapReference<Class> */ temp = temp->super_class_
3080 __ Ldr(temp, HeapOperand(temp, super_offset));
3081 // If heap poisoning is enabled, no need to unpoison
3082 // `temp`, as we are comparing against null below.
3083 __ Cbnz(temp, slow_path->GetEntryLabel());
3084 __ Bind(&do_put);
3085 } else {
3086 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003087 }
3088 }
3089
3090 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01003091 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003092 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01003093 __ Mov(temp2, value.W());
3094 GetAssembler()->PoisonHeapReference(temp2);
3095 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003096 }
3097
3098 if (!index.IsConstant()) {
3099 __ Add(temp, array, offset);
Vladimir Markod1ef8732017-04-18 13:55:13 +01003100 } else {
3101 // We no longer need the `temp` here so release it as the store below may
3102        // We no longer need `temp` here, so release it: the store below may need
3103        // a scratch register (if the constant index makes the offset too large),
3104        // and the poisoned `source` could be using the other scratch register.
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003105 }
Artem Serov914d7a82017-02-07 14:33:49 +00003106 {
3107 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
3108 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3109 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003110
Artem Serov914d7a82017-02-07 14:33:49 +00003111 if (!may_need_runtime_call_for_type_check) {
3112 codegen_->MaybeRecordImplicitNullCheck(instruction);
3113 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003114 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003115 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003116
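    // Mark the card covering `array` so the GC's remembered set sees the new reference;
    // when the value may be null, MarkGCCard guards the card marking with a null check.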
3117 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
3118
3119 if (done.IsLinked()) {
3120 __ Bind(&done);
3121 }
3122
3123 if (slow_path != nullptr) {
3124 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01003125 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003126 }
3127}
3128
Alexandre Rames67555f72014-11-18 10:55:16 +00003129void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003130 RegisterSet caller_saves = RegisterSet::Empty();
3131 InvokeRuntimeCallingConvention calling_convention;
3132 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3133 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
3134 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00003135 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00003136 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00003137}
3138
3139void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01003140 BoundsCheckSlowPathARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003141 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003142 codegen_->AddSlowPath(slow_path);
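  // The unsigned `hs` branch below covers both index >= length and negative indices,
  // which wrap around to large unsigned values.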
Alexandre Rames67555f72014-11-18 10:55:16 +00003143 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
3144 __ B(slow_path->GetEntryLabel(), hs);
3145}
3146
Alexandre Rames67555f72014-11-18 10:55:16 +00003147void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
3148 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003149 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexandre Rames67555f72014-11-18 10:55:16 +00003150 locations->SetInAt(0, Location::RequiresRegister());
3151 if (check->HasUses()) {
3152 locations->SetOut(Location::SameAsFirstInput());
3153 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01003154 // Rely on the type initialization to save everything we need.
3155 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Alexandre Rames67555f72014-11-18 10:55:16 +00003156}
3157
3158void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
3159 // We assume the class is not null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01003160 SlowPathCodeARM64* slow_path =
3161 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(check->GetLoadClass(), check);
Alexandre Rames67555f72014-11-18 10:55:16 +00003162 codegen_->AddSlowPath(slow_path);
3163 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
3164}
3165
Roland Levillain1a653882016-03-18 18:05:57 +00003166static bool IsFloatingPointZeroConstant(HInstruction* inst) {
3167 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
3168 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
3169}
3170
3171void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
3172 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
3173 Location rhs_loc = instruction->GetLocations()->InAt(1);
3174 if (rhs_loc.IsConstant()) {
3175 // 0.0 is the only immediate that can be encoded directly in
3176 // an FCMP instruction.
3177 //
3178 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
3179 // specify that in a floating-point comparison, positive zero
3180 // and negative zero are considered equal, so we can use the
3181 // literal 0.0 for both cases here.
3182 //
3183    // Note however that some methods (Float.equals, Float.compare,
3184    // Float.compareTo, Double.equals, Double.compare,
3185 // Double.compareTo, Math.max, Math.min, StrictMath.max,
3186 // StrictMath.min) consider 0.0 to be (strictly) greater than
3187 // -0.0. So if we ever translate calls to these methods into a
3188 // HCompare instruction, we must handle the -0.0 case with
3189 // care here.
3190 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
3191 __ Fcmp(lhs_reg, 0.0);
3192 } else {
3193 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
3194 }
Roland Levillain7f63c522015-07-13 15:54:55 +00003195}
3196
Serban Constantinescu02164b32014-11-13 14:05:07 +00003197void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003198 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003199 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003200 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01003201 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003202 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003203 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003204 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003205 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003206 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003207 case DataType::Type::kInt32:
3208 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003209 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00003210 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00003211 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3212 break;
3213 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003214 case DataType::Type::kFloat32:
3215 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003216 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00003217 locations->SetInAt(1,
3218 IsFloatingPointZeroConstant(compare->InputAt(1))
3219 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
3220 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00003221 locations->SetOut(Location::RequiresRegister());
3222 break;
3223 }
3224 default:
3225 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3226 }
3227}
3228
3229void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003230 DataType::Type in_type = compare->InputAt(0)->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00003231
3232 // 0 if: left == right
3233 // 1 if: left > right
3234 // -1 if: left < right
3235 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003236 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003237 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003238 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003239 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003240 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003241 case DataType::Type::kInt32:
3242 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003243 Register result = OutputRegister(compare);
3244 Register left = InputRegisterAt(compare, 0);
3245 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003246 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08003247 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
3248 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
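      // For illustration: left < right sets lt, so Cset yields 1 and Cneg flips it to -1;
      // equal values leave 0 (ne and lt both false); left > right keeps the +1 from Cset.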
Serban Constantinescu02164b32014-11-13 14:05:07 +00003249 break;
3250 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003251 case DataType::Type::kFloat32:
3252 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003253 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00003254 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003255 __ Cset(result, ne);
3256 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01003257 break;
3258 }
3259 default:
3260 LOG(FATAL) << "Unimplemented compare type " << in_type;
3261 }
3262}
3263
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003264void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003265 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00003266
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003267 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003268 locations->SetInAt(0, Location::RequiresFpuRegister());
3269 locations->SetInAt(1,
3270 IsFloatingPointZeroConstant(instruction->InputAt(1))
3271 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
3272 : Location::RequiresFpuRegister());
3273 } else {
3274 // Integer cases.
3275 locations->SetInAt(0, Location::RequiresRegister());
3276 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
3277 }
3278
David Brazdilb3e773e2016-01-26 11:28:37 +00003279 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00003280 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01003281 }
3282}
3283
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003284void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003285 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003286 return;
3287 }
3288
3289 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01003290 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00003291 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01003292
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003293 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00003294 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003295 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00003296 } else {
3297 // Integer cases.
3298 Register lhs = InputRegisterAt(instruction, 0);
3299 Operand rhs = InputOperandAt(instruction, 1);
3300 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003301 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00003302 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003303}
3304
3305#define FOR_EACH_CONDITION_INSTRUCTION(M) \
3306 M(Equal) \
3307 M(NotEqual) \
3308 M(LessThan) \
3309 M(LessThanOrEqual) \
3310 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07003311 M(GreaterThanOrEqual) \
3312 M(Below) \
3313 M(BelowOrEqual) \
3314 M(Above) \
3315 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01003316#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003317void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
3318void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01003319FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00003320#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01003321#undef FOR_EACH_CONDITION_INSTRUCTION
3322
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003323void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003324 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003325 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003326 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
3327
3328 Register out = OutputRegister(instruction);
3329 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01003330
3331 if (abs_imm == 2) {
3332 int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
3333 __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
3334 } else {
3335 UseScratchRegisterScope temps(GetVIXLAssembler());
3336 Register temp = temps.AcquireSameSizeAs(out);
3337 __ Add(temp, dividend, abs_imm - 1);
3338 __ Cmp(dividend, 0);
3339 __ Csel(out, temp, dividend, lt);
3340 }
3341
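  // At this point `out` holds the dividend, biased by (abs_imm - 1) for negative values
  // only, so the arithmetic shift below rounds the quotient toward zero (and is negated
  // when imm < 0). For illustration, with abs_imm == 4: 7 >> 2 == 1 and (-7 + 3) >> 2 == -1.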
Zheng Xuc6667102015-05-15 16:08:45 +08003342 int ctz_imm = CTZ(abs_imm);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003343 if (imm > 0) {
3344 __ Asr(out, out, ctz_imm);
Zheng Xuc6667102015-05-15 16:08:45 +08003345 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003346 __ Neg(out, Operand(out, ASR, ctz_imm));
Zheng Xuc6667102015-05-15 16:08:45 +08003347 }
3348}
3349
3350void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3351 DCHECK(instruction->IsDiv() || instruction->IsRem());
3352
3353 LocationSummary* locations = instruction->GetLocations();
3354 Location second = locations->InAt(1);
3355 DCHECK(second.IsConstant());
3356
3357 Register out = OutputRegister(instruction);
3358 Register dividend = InputRegisterAt(instruction, 0);
3359 int64_t imm = Int64FromConstant(second.GetConstant());
3360
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003361 DataType::Type type = instruction->GetResultType();
3362 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Zheng Xuc6667102015-05-15 16:08:45 +08003363
3364 int64_t magic;
3365 int shift;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003366 CalculateMagicAndShiftForDivRem(
3367 imm, type == DataType::Type::kInt64 /* is_long */, &magic, &shift);
Zheng Xuc6667102015-05-15 16:08:45 +08003368
3369 UseScratchRegisterScope temps(GetVIXLAssembler());
3370 Register temp = temps.AcquireSameSizeAs(out);
3371
3372 // temp = get_high(dividend * magic)
3373 __ Mov(temp, magic);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003374 if (type == DataType::Type::kInt64) {
Zheng Xuc6667102015-05-15 16:08:45 +08003375 __ Smulh(temp, dividend, temp);
3376 } else {
3377 __ Smull(temp.X(), dividend, temp);
3378 __ Lsr(temp.X(), temp.X(), 32);
3379 }
3380
3381 if (imm > 0 && magic < 0) {
3382 __ Add(temp, temp, dividend);
3383 } else if (imm < 0 && magic > 0) {
3384 __ Sub(temp, temp, dividend);
3385 }
3386
3387 if (shift != 0) {
3388 __ Asr(temp, temp, shift);
3389 }
3390
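  // `temp` now holds the quotient up to a final sign correction: (temp ASR #63, or #31
  // for 32-bit) is -1 when `temp` is negative and 0 otherwise, so the Sub below adds 1
  // in that case, truncating the quotient toward zero. For Rem, Msub then computes
  // dividend - quotient * imm.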
3391 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003392 __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003393 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003394 __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003395 // TODO: Strength reduction for msub.
3396 Register temp_imm = temps.AcquireSameSizeAs(out);
3397 __ Mov(temp_imm, imm);
3398 __ Msub(out, temp, temp_imm, dividend);
3399 }
3400}
3401
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003402void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003403 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Zheng Xuc6667102015-05-15 16:08:45 +08003404
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003405 if (imm == 0) {
3406 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3407 return;
3408 }
Zheng Xuc6667102015-05-15 16:08:45 +08003409
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003410 if (IsPowerOfTwo(AbsOrMin(imm))) {
3411 GenerateIntDivForPower2Denom(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003412 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003413 // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
3414 DCHECK(imm < -2 || imm > 2) << imm;
3415 GenerateDivRemWithAnyConstant(instruction);
3416 }
3417}
3418
3419void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv *instruction) {
3420 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
3421 << instruction->GetResultType();
3422
3423 if (instruction->GetLocations()->InAt(1).IsConstant()) {
3424 GenerateIntDivForConstDenom(instruction);
3425 } else {
3426 Register out = OutputRegister(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003427 Register dividend = InputRegisterAt(instruction, 0);
3428 Register divisor = InputRegisterAt(instruction, 1);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003429 __ Sdiv(out, dividend, divisor);
Zheng Xuc6667102015-05-15 16:08:45 +08003430 }
3431}
3432
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003433void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3434 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003435 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003436 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003437 case DataType::Type::kInt32:
3438 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003439 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003440 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003441 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3442 break;
3443
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003444 case DataType::Type::kFloat32:
3445 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003446 locations->SetInAt(0, Location::RequiresFpuRegister());
3447 locations->SetInAt(1, Location::RequiresFpuRegister());
3448 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3449 break;
3450
3451 default:
3452 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3453 }
3454}
3455
3456void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003457 DataType::Type type = div->GetResultType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003458 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003459 case DataType::Type::kInt32:
3460 case DataType::Type::kInt64:
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003461 GenerateIntDiv(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003462 break;
3463
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003464 case DataType::Type::kFloat32:
3465 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003466 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3467 break;
3468
3469 default:
3470 LOG(FATAL) << "Unexpected div type " << type;
3471 }
3472}
3473
Alexandre Rames67555f72014-11-18 10:55:16 +00003474void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003475 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003476 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003477}
3478
3479void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3480 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003481 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003482 codegen_->AddSlowPath(slow_path);
3483 Location value = instruction->GetLocations()->InAt(0);
3484
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003485 DataType::Type type = instruction->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +00003486
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003487 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003488 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003489 return;
3490 }
3491
Alexandre Rames67555f72014-11-18 10:55:16 +00003492 if (value.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003493 int64_t divisor = Int64FromLocation(value);
Alexandre Rames67555f72014-11-18 10:55:16 +00003494 if (divisor == 0) {
3495 __ B(slow_path->GetEntryLabel());
3496 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003497      // A division by a non-zero constant is valid. We don't need to perform
3498 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003499 }
3500 } else {
3501 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3502 }
3503}
3504
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003505void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3506 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003507 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003508 locations->SetOut(Location::ConstantLocation(constant));
3509}
3510
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003511void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3512 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003513 // Will be generated at use site.
3514}
3515
Alexandre Rames5319def2014-10-23 10:03:10 +01003516void LocationsBuilderARM64::VisitExit(HExit* exit) {
3517 exit->SetLocations(nullptr);
3518}
3519
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003520void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003521}
3522
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003523void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3524 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003525 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003526 locations->SetOut(Location::ConstantLocation(constant));
3527}
3528
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003529void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003530 // Will be generated at use site.
3531}
3532
David Brazdilfc6a86a2015-06-26 10:33:45 +00003533void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003534 if (successor->IsExitBlock()) {
3535 DCHECK(got->GetPrevious()->AlwaysThrows());
3536 return; // no code needed
3537 }
3538
Serban Constantinescu02164b32014-11-13 14:05:07 +00003539 HBasicBlock* block = got->GetBlock();
3540 HInstruction* previous = got->GetPrevious();
3541 HLoopInformation* info = block->GetLoopInformation();
3542
David Brazdil46e2a392015-03-16 17:31:52 +00003543 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00003544 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
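      // The ArtMethod* sits at the bottom of the current frame (SP + 0); reload it and
      // bump its 16-bit hotness counter on this back edge.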
3545 UseScratchRegisterScope temps(GetVIXLAssembler());
3546 Register temp1 = temps.AcquireX();
3547 Register temp2 = temps.AcquireX();
3548 __ Ldr(temp1, MemOperand(sp, 0));
3549 __ Ldrh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3550 __ Add(temp2, temp2, 1);
3551 __ Strh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3552 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003553 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3554 return;
3555 }
3556 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3557 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01003558 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003559 }
3560 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003561 __ B(codegen_->GetLabelOf(successor));
3562 }
3563}
3564
David Brazdilfc6a86a2015-06-26 10:33:45 +00003565void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3566 got->SetLocations(nullptr);
3567}
3568
3569void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3570 HandleGoto(got, got->GetSuccessor());
3571}
3572
3573void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3574 try_boundary->SetLocations(nullptr);
3575}
3576
3577void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3578 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3579 if (!successor->IsExitBlock()) {
3580 HandleGoto(try_boundary, successor);
3581 }
3582}
3583
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003584void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003585 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003586 vixl::aarch64::Label* true_target,
3587 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003588 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003589
David Brazdil0debae72015-11-12 18:37:00 +00003590 if (true_target == nullptr && false_target == nullptr) {
3591 // Nothing to do. The code always falls through.
3592 return;
3593 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003594 // Constant condition, statically compared against "true" (integer value 1).
3595 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003596 if (true_target != nullptr) {
3597 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003598 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003599 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003600 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003601 if (false_target != nullptr) {
3602 __ B(false_target);
3603 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003604 }
David Brazdil0debae72015-11-12 18:37:00 +00003605 return;
3606 }
3607
3608 // The following code generates these patterns:
3609 // (1) true_target == nullptr && false_target != nullptr
3610 // - opposite condition true => branch to false_target
3611 // (2) true_target != nullptr && false_target == nullptr
3612 // - condition true => branch to true_target
3613 // (3) true_target != nullptr && false_target != nullptr
3614 // - condition true => branch to true_target
3615 // - branch to false_target
3616 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003617 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003618 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003619 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003620 if (true_target == nullptr) {
3621 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3622 } else {
3623 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3624 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003625 } else {
3626 // The condition instruction has not been materialized, use its inputs as
3627 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003628 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003629
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003630 DataType::Type type = condition->InputAt(0)->GetType();
3631 if (DataType::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003632 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003633 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003634 IfCondition opposite_condition = condition->GetOppositeCondition();
3635 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003636 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003637 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003638 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003639 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003640 // Integer cases.
3641 Register lhs = InputRegisterAt(condition, 0);
3642 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003643
3644 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003645 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003646 if (true_target == nullptr) {
3647 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3648 non_fallthrough_target = false_target;
3649 } else {
3650 arm64_cond = ARM64Condition(condition->GetCondition());
3651 non_fallthrough_target = true_target;
3652 }
3653
Aart Bik086d27e2016-01-20 17:02:00 -08003654 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003655 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
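        // A comparison against zero can use the single-instruction cbz/cbnz and
        // tbz/tbnz forms instead of a cmp followed by a conditional branch.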
Roland Levillain7f63c522015-07-13 15:54:55 +00003656 switch (arm64_cond) {
3657 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003658 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003659 break;
3660 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003661 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003662 break;
3663 case lt:
3664 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003665 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003666 break;
3667 case ge:
3668 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003669 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003670 break;
3671 default:
3672            // Without the `static_cast` the compiler emits an error under
3673            // `-Werror=sign-promo`.
3674 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3675 }
3676 } else {
3677 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003678 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003679 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003680 }
3681 }
David Brazdil0debae72015-11-12 18:37:00 +00003682
3683 // If neither branch falls through (case 3), the conditional branch to `true_target`
3684 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3685 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003686 __ B(false_target);
3687 }
3688}
3689
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003690void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003691 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003692 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003693 locations->SetInAt(0, Location::RequiresRegister());
3694 }
3695}
3696
3697void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003698 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3699 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003700 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3701 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3702 true_target = nullptr;
3703 }
3704 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3705 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3706 false_target = nullptr;
3707 }
David Brazdil0debae72015-11-12 18:37:00 +00003708 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003709}
3710
3711void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003712 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003713 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01003714 InvokeRuntimeCallingConvention calling_convention;
3715 RegisterSet caller_saves = RegisterSet::Empty();
3716 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3717 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00003718 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003719 locations->SetInAt(0, Location::RequiresRegister());
3720 }
3721}
3722
3723void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003724 SlowPathCodeARM64* slow_path =
3725 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003726 GenerateTestAndBranch(deoptimize,
3727 /* condition_input_index */ 0,
3728 slow_path->GetEntryLabel(),
3729 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003730}
3731
Mingyao Yang063fc772016-08-02 11:02:54 -07003732void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003733 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07003734 LocationSummary(flag, LocationSummary::kNoCall);
3735 locations->SetOut(Location::RequiresRegister());
3736}
3737
3738void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
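  // Load the per-frame "should deoptimize" flag reserved in the stack frame; the runtime
  // presumably sets it (e.g., when a CHA-based inlining assumption is invalidated) so that
  // this check can force deoptimization of the frame.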
3739 __ Ldr(OutputRegister(flag),
3740 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3741}
3742
David Brazdilc0b601b2016-02-08 14:20:45 +00003743static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3744 return condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003745 DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
David Brazdilc0b601b2016-02-08 14:20:45 +00003746}
3747
Alexandre Rames880f1192016-06-13 16:04:50 +01003748static inline Condition GetConditionForSelect(HCondition* condition) {
3749 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003750 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3751 : ARM64Condition(cond);
3752}
3753
David Brazdil74eb1b22015-12-14 11:44:01 +00003754void LocationsBuilderARM64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003755 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003756 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003757 locations->SetInAt(0, Location::RequiresFpuRegister());
3758 locations->SetInAt(1, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003759 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames880f1192016-06-13 16:04:50 +01003760 } else {
3761 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3762 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3763 bool is_true_value_constant = cst_true_value != nullptr;
3764 bool is_false_value_constant = cst_false_value != nullptr;
3765 // Ask VIXL whether we should synthesize constants in registers.
3766 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3767 Operand true_op = is_true_value_constant ?
3768 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3769 Operand false_op = is_false_value_constant ?
3770 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3771 bool true_value_in_register = false;
3772 bool false_value_in_register = false;
3773 MacroAssembler::GetCselSynthesisInformation(
3774 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3775 true_value_in_register |= !is_true_value_constant;
3776 false_value_in_register |= !is_false_value_constant;
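    // For illustration (assuming VIXL's usual csel synthesis rules): constants such as
    // 0, 1 or -1 can typically be produced directly via csel/csinc/csinv against wzr,
    // so they stay as constant locations here; other immediates are requested in a
    // register instead.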
3777
3778 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3779 : Location::ConstantLocation(cst_true_value));
3780 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3781 : Location::ConstantLocation(cst_false_value));
Donghui Bai426b49c2016-11-08 14:55:38 +08003782 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003783 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003784
David Brazdil74eb1b22015-12-14 11:44:01 +00003785 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3786 locations->SetInAt(2, Location::RequiresRegister());
3787 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003788}
3789
3790void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003791 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003792 Condition csel_cond;
3793
3794 if (IsBooleanValueOrMaterializedCondition(cond)) {
3795 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003796 // Use the condition flags set by the previous instruction.
3797 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003798 } else {
3799 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003800 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003801 }
3802 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003803 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003804 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003805 } else {
3806 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003807 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003808 }
3809
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003810 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003811 __ Fcsel(OutputFPRegister(select),
3812 InputFPRegisterAt(select, 1),
3813 InputFPRegisterAt(select, 0),
3814 csel_cond);
3815 } else {
3816 __ Csel(OutputRegister(select),
3817 InputOperandAt(select, 1),
3818 InputOperandAt(select, 0),
3819 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003820 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003821}
3822
David Srbecky0cf44932015-12-09 14:09:59 +00003823void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003824 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00003825}
3826
David Srbeckyd28f4a02016-03-14 17:14:24 +00003827void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3828 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003829}
3830
3831void CodeGeneratorARM64::GenerateNop() {
3832 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003833}
3834
Alexandre Rames5319def2014-10-23 10:03:10 +01003835void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00003836 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003837}
3838
3839void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003840 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003841}
3842
3843void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003844 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003845}
3846
3847void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003848 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003849}
3850
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003851// Temp is used for read barrier.
3852static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3853 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003854 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003855 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3856 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3857 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3858 return 1;
3859 }
3860 return 0;
3861}
3862
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003863// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003864// interface pointer, one for loading the current interface.
3865// The other checks have one temp for loading the object's class.
3866static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3867 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3868 return 3;
3869 }
3870 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003871}
3872
void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck: {
      bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
      call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
      break;
    }
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
    case TypeCheckKind::kBitstringCheck:
      break;
  }

  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // The "out" register is used as a temporary, so it overlaps with the inputs.
  // Note that TypeCheckSlowPathARM64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  // Add temps if necessary for read barriers.
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}

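// Code generation for HInstanceOf. The result register `out` doubles as the
// class pointer while walking the type hierarchy: it is loaded with obj->klass_,
// possibly chased through super_class_ or component_type_, and finally set to
// 1 or 0. A shared `zero` label handles the null-object case and failed
// primitive-array checks; `done` joins all exits.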
void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? Register()
      : InputRegisterAt(instruction, 1);
  Location out_loc = locations->Out();
  Register out = OutputRegister(instruction);
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  vixl::aarch64::Label done, zero;
  SlowPathCodeARM64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid null check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &zero);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      __ Cmp(out, cls);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Cmp(out, cls);
      __ B(ne, &loop);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop, success;
      __ Bind(&loop);
      __ Cmp(out, cls);
      __ B(eq, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ Cbnz(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Do an exact check.
      vixl::aarch64::Label exact_check;
      __ Cmp(out, cls);
      __ B(eq, &exact_check);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Cbz(out, &done);
      __ Ldrh(out, HeapOperand(out, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(out, &zero);
      __ Bind(&exact_check);
      __ Mov(out, 1);
      __ B(&done);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      __ Cmp(out, cls);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(ne, slow_path->GetEntryLabel());
      __ Mov(out, 1);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, out);
      __ Cset(out, eq);
      if (zero.IsLinked()) {
        __ B(&done);
      }
      break;
    }
  }

  if (zero.IsLinked()) {
    __ Bind(&zero);
    __ Mov(out, 0);
  }

  if (done.IsLinked()) {
    __ Bind(&done);
  }

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

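// Location setup for HCheckCast. The call kind depends on whether the type
// check slow path merely throws (fatal) or may return to the fast path; the
// bitstring check takes its path/mask/value operands as constants, all other
// kinds need the target class in a register.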
void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  if (type_check_kind == TypeCheckKind::kBitstringCheck) {
    locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
    locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
    locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
  locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
}

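// Code generation for HCheckCast. Unlike HInstanceOf, failures do not produce
// a value: every mismatch branches to TypeCheckSlowPathARM64, which either
// throws or re-runs the check out of line. The reference loads below use
// kWithoutReadBarrier because the slow path retries the check upon failure.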
void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = InputRegisterAt(instruction, 0);
  Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
      ? Register()
      : InputRegisterAt(instruction, 1);
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_GE(num_temps, 1u);
  DCHECK_LE(num_temps, 3u);
  Location temp_loc = locations->GetTemp(0);
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
  Register temp = WRegisterFrom(temp_loc);
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCodeARM64* type_check_slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(type_check_slow_path);

  vixl::aarch64::Label done;
  // Avoid null check if we know obj is not null.
  if (instruction->MustDoNullCheck()) {
    __ Cbz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      __ Cmp(temp, cls);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, compare classes.
      __ Cmp(temp, cls);
      __ B(ne, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Walk over the class hierarchy to find a match.
      vixl::aarch64::Label loop;
      __ Bind(&loop);
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the class reference currently in `temp` is not null, jump
      // back at the beginning of the loop.
      __ Cbnz(temp, &loop);
      // Otherwise, jump to the slow path to throw the exception.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Do an exact check.
      __ Cmp(temp, cls);
      __ B(eq, &done);

      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);

      // If the component type is null, jump to the slow path to throw the exception.
      __ Cbz(temp, type_check_slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array. Further check that this component type is not a
      // primitive type.
      __ Ldrh(temp, HeapOperand(temp, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check cases.
      //
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HCheckCast
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ B(type_check_slow_path->GetEntryLabel());
      break;
    case TypeCheckKind::kInterfaceCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
      // Loop through the iftable and check if any class matches.
      vixl::aarch64::Label start_loop;
      __ Bind(&start_loop);
      __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
      __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
      GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
      // Go to next interface.
      __ Add(temp, temp, 2 * kHeapReferenceSize);
      __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
      // Compare the classes and continue the loop if they do not match.
      __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
      __ B(ne, &start_loop);
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      GenerateBitstringTypeCheckCompare(instruction, temp);
      __ B(ne, type_check_slow_path->GetEntryLabel());
      break;
    }
  }
  __ Bind(&done);

  __ Bind(type_check_slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
}

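// Interface dispatch: load the receiver's class, then the IMT, then the IMT
// entry for this invoke's IMT index, and branch to its entry point. The dex
// method index is passed in ip1 as the hidden argument expected by
// art_quick_imt_conflict_trampoline in case of an IMT conflict.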
void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  LocationSummary* locations = invoke->GetLocations();
  Register temp = XRegisterFrom(locations->GetTemp(0));
  Location receiver = locations->InAt(0);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  // The register ip1 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope scratch_scope(masm);
  scratch_scope.Exclude(ip1);
  __ Mov(ip1, invoke->GetDexMethodIndex());

  // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), StackOperandFrom(receiver));
    {
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      // /* HeapReference<Class> */ temp = temp->klass_
      __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
      codegen_->MaybeRecordImplicitNullCheck(invoke);
    }
  } else {
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
    codegen_->MaybeRecordImplicitNullCheck(invoke);
  }

  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  __ Ldr(temp,
      MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kArm64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));

  {
    // Ensure the pc position is recorded immediately after the `blr` instruction.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);

    // lr();
    __ blr(lr);
    DCHECK(!codegen_->IsLeafMethod());
    codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  }

  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

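// If the invoke was recognized as an intrinsic during the locations pass,
// emit the intrinsic expansion instead of a call and report success so the
// caller can skip the generic dispatch code.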
static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorARM64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On ARM64 we support all dispatch types.
  return desired_dispatch_info;
}

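// Static/direct call emission. The callee ArtMethod* (or code address) is
// materialized into `temp` according to the method load kind: from the thread
// for string-init entrypoints, via PC-relative ADRP+ADD/LDR patches for boot
// image and .bss entries, from the literal pool for JIT direct addresses, or
// via a runtime call as a last resort. The code pointer is then either the
// current frame entry (recursive calls) or the callee's quick entry point.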
void CodeGeneratorARM64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      uint32_t offset =
          GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      // temp = thread->string_init_entrypoint
      __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Add ADRP with its PC-relative method patch.
      vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add ADD with its PC-relative method patch.
      vixl::aarch64::Label* add_label =
          NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
      EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
      uint32_t boot_image_offset = GetBootImageOffset(invoke);
      vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add LDR with its PC-relative .data.bimg.rel.ro patch.
      vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Add ADRP with its PC-relative .bss entry patch.
      MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
      vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
      EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
      // Add LDR with its PC-relative .bss entry patch.
      vixl::aarch64::Label* ldr_label =
          NewMethodBssEntryPatch(target_method, adrp_label);
      EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // Load method address from literal pool.
      __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      {
        // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
        ExactAssemblyScope eas(GetVIXLAssembler(),
                               kInstructionSize,
                               CodeBufferCheckScope::kExactSize);
        __ bl(&frame_entry_label_);
        RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      }
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // LR = callee_method->entry_point_from_quick_compiled_code_;
      __ Ldr(lr, MemOperand(
          XRegisterFrom(callee_method),
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
      {
        // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
        ExactAssemblyScope eas(GetVIXLAssembler(),
                               kInstructionSize,
                               CodeBufferCheckScope::kExactSize);
        // lr()
        __ blr(lr);
        RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
      }
      break;
  }

  DCHECK(!IsLeafMethod());
}

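// Virtual call emission: load the receiver's class (the receiver is always in
// the first calling-convention register here, even for intrinsics), index into
// the embedded vtable with the invoke's vtable index, then branch to the
// resolved ArtMethod's quick entry point.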
void CodeGeneratorARM64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);
  Register temp = XRegisterFrom(temp_in);
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);

  DCHECK(receiver.IsRegister());

  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
    // /* HeapReference<Class> */ temp = receiver->klass_
    __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
    MaybeRecordImplicitNullCheck(invoke);
  }
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp, MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  {
    // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
    ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
    // lr();
    __ blr(lr);
    RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
  }
}

void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
    uint32_t intrinsic_data,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      /* dex_file */ nullptr, intrinsic_data, adrp_label, &boot_image_intrinsic_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
    uint32_t boot_image_offset,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      /* dex_file */ nullptr, boot_image_offset, adrp_label, &boot_image_method_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
    MethodReference target_method,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
    MethodReference target_method,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(
      &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
}

vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    vixl::aarch64::Label* adrp_label) {
  return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
}

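// Emit the CBNZ on the marking register that guards a Baker read barrier slow
// path. Under JIT the branch targets a shared, lazily created slow path entry;
// under AOT a placeholder CBNZ is emitted and recorded as a linker patch keyed
// by `custom_data`.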
void CodeGeneratorARM64::EmitBakerReadBarrierCbnz(uint32_t custom_data) {
  ExactAssemblyScope guard(GetVIXLAssembler(), 1 * vixl::aarch64::kInstructionSize);
  if (Runtime::Current()->UseJitCompilation()) {
    auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
    vixl::aarch64::Label* slow_path_entry = &it->second.label;
    __ cbnz(mr, slow_path_entry);
  } else {
    baker_read_barrier_patches_.emplace_back(custom_data);
    vixl::aarch64::Label* cbnz_label = &baker_read_barrier_patches_.back().label;
    __ bind(cbnz_label);
    __ cbnz(mr, static_cast<int64_t>(0));  // Placeholder, patched at link-time.
  }
}

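// Common helper for all PC-relative patches: record a (dex file, index) pair in
// the given patch list and return the label to bind at the instruction that
// needs patching. The first (ADRP) patch points at its own label; subsequent
// ADD/LDR patches point back at the ADRP label so the linker can pair them.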
vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
    const DexFile* dex_file,
    uint32_t offset_or_index,
    vixl::aarch64::Label* adrp_label,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  // Add a patch entry and return the label.
  patches->emplace_back(dex_file, offset_or_index);
  PcRelativePatchInfo* info = &patches->back();
  vixl::aarch64::Label* label = &info->label;
  // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
  info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
  return label;
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
    uint64_t address) {
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
    const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
  ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
  return jit_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
}

vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
    const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
  ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
}

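// The three helpers below emit the instructions that the linker later patches:
// an ADRP computing a 4KiB-page-aligned base, followed by either an ADD (to
// form the full address) or an LDR (to load through a .data.bimg.rel.ro or
// .bss entry). Each is bound to its fixup label inside a single-emission scope
// so the recorded patch offset is exact.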
void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
                                             vixl::aarch64::Register reg) {
  DCHECK(reg.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
}

void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
                                            vixl::aarch64::Register out,
                                            vixl::aarch64::Register base) {
  DCHECK(out.IsX());
  DCHECK(base.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ add(out, base, Operand(/* offset placeholder */ 0));
}

void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
                                                  vixl::aarch64::Register out,
                                                  vixl::aarch64::Register base) {
  DCHECK(base.IsX());
  SingleEmissionCheckScope guard(GetVIXLAssembler());
  __ Bind(fixup_label);
  __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
}

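// Materialize a boot image address in `reg`. Depending on the compilation mode
// this emits one of three patterns (rough sketch of the emitted instructions):
//   boot image compilation:  adrp reg, <patch>; add reg, reg, <patch>
//   AOT app compilation:     adrp reg, <patch>; ldr wreg, [reg, <patch>]  (.data.bimg.rel.ro)
//   JIT compilation:         ldr wreg, <literal pool entry holding the address>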
void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
                                              uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    // Add ADRP with its PC-relative intrinsic patch.
    vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
    EmitAdrpPlaceholder(adrp_label, reg.X());
    // Add ADD with its PC-relative intrinsic patch.
    vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
    EmitAddPlaceholder(add_label, reg.X(), reg.X());
  } else if (Runtime::Current()->IsAotCompiler()) {
    // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
    vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
    EmitAdrpPlaceholder(adrp_label, reg.X());
    // Add LDR with its PC-relative .data.bimg.rel.ro patch.
    vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
    EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
  } else {
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
    __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
  }
}

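// Used by intrinsics such as Integer.valueOf to allocate an instance of a boot
// image class: load the class into the first runtime-call argument register
// (via a boot image type patch when compiling the boot image, otherwise via
// LoadBootImageAddress) and call the AllocObjectInitialized entrypoint.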
void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
                                                      uint32_t boot_image_offset) {
  DCHECK(invoke->IsStatic());
  InvokeRuntimeCallingConvention calling_convention;
  Register argument = calling_convention.GetRegisterAt(0);
  if (GetCompilerOptions().IsBootImage()) {
    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
    MethodReference target_method = invoke->GetTargetMethod();
    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
    // Add ADRP with its PC-relative type patch.
    vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
    EmitAdrpPlaceholder(adrp_label, argument.X());
    // Add ADD with its PC-relative type patch.
    vixl::aarch64::Label* add_label =
        NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
    EmitAddPlaceholder(add_label, argument.X(), argument.X());
  } else {
    LoadBootImageAddress(argument, boot_image_offset);
  }
  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectInitialized, void*, mirror::Class*>();
}

Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004837template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00004838inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4839 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004840 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004841 for (const PcRelativePatchInfo& info : infos) {
4842 linker_patches->push_back(Factory(info.label.GetLocation(),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004843 info.target_dex_file,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004844 info.pc_insn_label->GetLocation(),
4845 info.offset_or_index));
4846 }
4847}
4848
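// Adapts a patch factory that takes no dex file (e.g. IntrinsicReferencePatch or
// DataBimgRelRoPatch) to the four-argument signature expected by
// EmitPcRelativeLinkerPatches<>() above.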
Vladimir Marko6fd16062018-06-26 11:02:04 +01004849template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
4850linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
4851 const DexFile* target_dex_file,
4852 uint32_t pc_insn_offset,
4853 uint32_t boot_image_offset) {
4854 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
4855 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00004856}
4857
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004858void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00004859 DCHECK(linker_patches->empty());
4860 size_t size =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004861 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004862 method_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004863 boot_image_type_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004864 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004865 boot_image_string_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004866 string_bss_entry_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01004867 boot_image_intrinsic_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004868 baker_read_barrier_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004869 linker_patches->reserve(size);
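  // When compiling the boot image, the boot_image_* lists are emitted as direct
  // PC-relative patches; otherwise only the method list may be non-empty and its
  // entries are emitted as .data.bimg.rel.ro patches (see the DCHECKs below).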
Vladimir Marko65979462017-05-19 17:25:12 +01004870 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004871 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004872 boot_image_method_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004873 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004874 boot_image_type_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004875 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004876 boot_image_string_patches_, linker_patches);
Vladimir Marko6fd16062018-06-26 11:02:04 +01004877 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
4878 boot_image_intrinsic_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01004879 } else {
Vladimir Marko6fd16062018-06-26 11:02:04 +01004880 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
Vladimir Markob066d432018-01-03 13:14:37 +00004881 boot_image_method_patches_, linker_patches);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004882 DCHECK(boot_image_type_patches_.empty());
4883 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004884 DCHECK(boot_image_intrinsic_patches_.empty());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004885 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004886 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
4887 method_bss_entry_patches_, linker_patches);
4888 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
4889 type_bss_entry_patches_, linker_patches);
4890 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
4891 string_bss_entry_patches_, linker_patches);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004892 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004893 linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
4894 info.label.GetLocation(), info.custom_data));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004895 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004896 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004897}
4898
Vladimir Markoca1e0382018-04-11 09:58:41 +00004899bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
4900 return patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
4901 patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
4902}
4903
4904void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
4905 /*out*/ ArenaVector<uint8_t>* code,
4906 /*out*/ std::string* debug_name) {
4907 Arm64Assembler assembler(GetGraph()->GetAllocator());
4908 switch (patch.GetType()) {
4909 case linker::LinkerPatch::Type::kCallRelative: {
4910 // The thunk just uses the entry point in the ArtMethod. This works even for calls
4911 // to the generic JNI and interpreter trampolines.
4912 Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
4913 kArm64PointerSize).Int32Value());
4914 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
4915 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
4916 *debug_name = "MethodCallThunk";
4917 }
4918 break;
4919 }
4920 case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
4921 DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
4922 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
4923 break;
4924 }
4925 default:
4926 LOG(FATAL) << "Unexpected patch type " << patch.GetType();
4927 UNREACHABLE();
4928 }
4929
4930 // Ensure we emit the literal pool if any.
4931 assembler.FinalizeCode();
4932 code->resize(assembler.CodeSize());
4933 MemoryRegion code_region(code->data(), code->size());
4934 assembler.FinalizeInstructions(code_region);
4935}
4936
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004937vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
4938 return uint32_literals_.GetOrCreate(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004939 value,
4940 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4941}
4942
Scott Wakeling97c72b72016-06-24 16:19:36 +01004943vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004944 return uint64_literals_.GetOrCreate(
4945 value,
4946 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004947}
4948
Andreas Gampe878d58c2015-01-15 23:24:00 -08004949void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004950 // Explicit clinit checks triggered by static invokes must have been pruned by
4951 // art::PrepareForRegisterAllocation.
4952 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004953
Andreas Gampe878d58c2015-01-15 23:24:00 -08004954 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004955 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004956 return;
4957 }
4958
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004959 {
4960 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4961 // are no pools emitted.
4962 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4963 LocationSummary* locations = invoke->GetLocations();
4964 codegen_->GenerateStaticOrDirectCall(
4965 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
4966 }
4967
4968 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004969}
4970
4971void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004972 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004973 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004974 return;
4975 }
4976
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004977 {
4978 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
4979 // are no pools emitted.
4980 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4981 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
4982 DCHECK(!codegen_->IsLeafMethod());
4983 }
4984
4985 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004986}
4987
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004988HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4989 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004990 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004991 case HLoadClass::LoadKind::kInvalid:
4992 LOG(FATAL) << "UNREACHABLE";
4993 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004994 case HLoadClass::LoadKind::kReferrersClass:
4995 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004996 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004997 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004998 case HLoadClass::LoadKind::kBssEntry:
4999 DCHECK(!Runtime::Current()->UseJitCompilation());
5000 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005001 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005002 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005003 DCHECK(Runtime::Current()->UseJitCompilation());
5004 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005005 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005006 break;
5007 }
5008 return desired_class_load_kind;
5009}
5010
Alexandre Rames67555f72014-11-18 10:55:16 +00005011void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00005012 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005013 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005014 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00005015 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005016 cls,
5017 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00005018 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00005019 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005020 return;
5021 }
Vladimir Marko41559982017-01-06 14:04:23 +00005022 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005023
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005024 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5025 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005026 ? LocationSummary::kCallOnSlowPath
5027 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005028 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005029 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005030 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005031 }
5032
Vladimir Marko41559982017-01-06 14:04:23 +00005033 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005034 locations->SetInAt(0, Location::RequiresRegister());
5035 }
5036 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00005037 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
5038 if (!kUseReadBarrier || kUseBakerReadBarrier) {
5039 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01005040 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00005041 } else {
5042 // For non-Baker read barrier we have a temp-clobbering call.
5043 }
5044 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005045}
5046
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005047// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5048// move.
5049void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00005050 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005051 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00005052 codegen_->GenerateLoadClassRuntimeCall(cls);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005053 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle580b6092015-10-06 17:35:58 +01005054 return;
5055 }
Vladimir Marko41559982017-01-06 14:04:23 +00005056 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01005057
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005058 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01005059 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00005060
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005061 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
5062 ? kWithoutReadBarrier
5063 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005064 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00005065 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005066 case HLoadClass::LoadKind::kReferrersClass: {
5067 DCHECK(!cls->CanCallRuntime());
5068 DCHECK(!cls->MustGenerateClinitCheck());
5069 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5070 Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00005071 codegen_->GenerateGcRootFieldLoad(cls,
5072 out_loc,
5073 current_method,
5074 ArtMethod::DeclaringClassOffset().Int32Value(),
5075 /* fixup_label */ nullptr,
5076 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005077 break;
5078 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005079 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005080 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005081 // Add ADRP with its PC-relative type patch.
5082 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08005083 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005084 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005085 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005086 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005087 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005088 codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005089 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005090 break;
5091 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005092 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005093 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005094 uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
5095 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
5096 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005097 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005098 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005099 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005100 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005101 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005102 break;
5103 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005104 case HLoadClass::LoadKind::kBssEntry: {
5105 // Add ADRP with its PC-relative Class .bss entry patch.
5106 const DexFile& dex_file = cls->GetDexFile();
5107 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markof3c52b42017-11-17 17:32:12 +00005108 vixl::aarch64::Register temp = XRegisterFrom(out_loc);
5109 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
5110 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005111 // Add LDR with its PC-relative Class .bss entry patch.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005112 vixl::aarch64::Label* ldr_label =
Vladimir Markof3c52b42017-11-17 17:32:12 +00005113 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005114 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00005115 codegen_->GenerateGcRootFieldLoad(cls,
5116 out_loc,
5117 temp,
5118 /* offset placeholder */ 0u,
5119 ldr_label,
5120 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005121 generate_null_check = true;
5122 break;
5123 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005124 case HLoadClass::LoadKind::kJitBootImageAddress: {
5125 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
5126 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
5127 DCHECK_NE(address, 0u);
5128 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
5129 break;
5130 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005131 case HLoadClass::LoadKind::kJitTableAddress: {
5132 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
5133 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005134 cls->GetClass()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00005135 codegen_->GenerateGcRootFieldLoad(cls,
5136 out_loc,
5137 out.X(),
5138 /* offset */ 0,
5139 /* fixup_label */ nullptr,
5140 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005141 break;
5142 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005143 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005144 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00005145 LOG(FATAL) << "UNREACHABLE";
5146 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005147 }
5148
Vladimir Markoea4c1262017-02-06 19:59:33 +00005149 bool do_clinit = cls->MustGenerateClinitCheck();
5150 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005151 DCHECK(cls->CanCallRuntime());
Vladimir Markoa9f303c2018-07-20 16:43:56 +01005152 SlowPathCodeARM64* slow_path =
5153 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(cls, cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005154 codegen_->AddSlowPath(slow_path);
5155 if (generate_null_check) {
5156 __ Cbz(out, slow_path->GetEntryLabel());
5157 }
5158 if (cls->MustGenerateClinitCheck()) {
5159 GenerateClassInitializationCheck(slow_path, out);
5160 } else {
5161 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00005162 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005163 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005164 }
5165}
5166
Orion Hodsondbaa5c72018-05-10 08:22:46 +01005167void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5168 InvokeRuntimeCallingConvention calling_convention;
5169 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5170 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
5171}
5172
5173void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5174 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
5175}
5176
Orion Hodson18259d72018-04-12 11:18:23 +01005177void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
5178 InvokeRuntimeCallingConvention calling_convention;
5179 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5180 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
5181}
5182
5183void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
5184 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
5185}
5186
David Brazdilcb1c0552015-08-04 16:22:25 +01005187static MemOperand GetExceptionTlsAddress() {
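  // `tr` is the reserved thread register, so the pending exception can be addressed
  // at a fixed offset from the current Thread.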
Andreas Gampe542451c2016-07-26 09:02:02 -07005188 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01005189}
5190
Alexandre Rames67555f72014-11-18 10:55:16 +00005191void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
5192 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005193 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexandre Rames67555f72014-11-18 10:55:16 +00005194 locations->SetOut(Location::RequiresRegister());
5195}
5196
5197void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005198 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
5199}
5200
5201void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005202 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01005203}
5204
5205void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5206 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00005207}
5208
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005209HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
5210 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005211 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005212 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005213 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00005214 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005215 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005216 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005217 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005218 case HLoadString::LoadKind::kJitTableAddress:
5219 DCHECK(Runtime::Current()->UseJitCompilation());
5220 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005221 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005222 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005223 }
5224 return desired_string_load_kind;
5225}
5226
Alexandre Rames67555f72014-11-18 10:55:16 +00005227void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005228 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01005229 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005230 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005231 InvokeRuntimeCallingConvention calling_convention;
5232 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
5233 } else {
5234 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005235 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5236 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00005237 // Rely on the pResolveString and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01005238 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005239 } else {
5240 // For non-Baker read barrier we have a temp-clobbering call.
5241 }
5242 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005243 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005244}
5245
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005246// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5247// move.
5248void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00005249 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005250 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005251
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005252 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005253 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005254 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005255 // Add ADRP with its PC-relative String patch.
5256 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005257 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005258 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005259 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005260 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005261 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005262 codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005263 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005264 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005265 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005266 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005267 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005268 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
5269 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
5270 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005271 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005272 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005273 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005274 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005275 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
5276 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005277 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00005278 case HLoadString::LoadKind::kBssEntry: {
5279 // Add ADRP with its PC-relative String .bss entry patch.
5280 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005281 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00005282 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markof3c52b42017-11-17 17:32:12 +00005283 Register temp = XRegisterFrom(out_loc);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005284 vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005285 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005286 // Add LDR with its PC-relative String .bss entry patch.
Vladimir Markoaad75c62016-10-03 08:46:48 +00005287 vixl::aarch64::Label* ldr_label =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005288 codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005289 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00005290 codegen_->GenerateGcRootFieldLoad(load,
5291 out_loc,
5292 temp,
5293 /* offset placeholder */ 0u,
5294 ldr_label,
5295 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005296 SlowPathCodeARM64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00005297 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005298 codegen_->AddSlowPath(slow_path);
5299 __ Cbz(out.X(), slow_path->GetEntryLabel());
5300 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005301 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005302 return;
5303 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005304 case HLoadString::LoadKind::kJitBootImageAddress: {
5305 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
5306 DCHECK_NE(address, 0u);
5307 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
5308 return;
5309 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005310 case HLoadString::LoadKind::kJitTableAddress: {
5311 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005312 load->GetStringIndex(),
5313 load->GetString()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00005314 codegen_->GenerateGcRootFieldLoad(load,
5315 out_loc,
5316 out.X(),
5317 /* offset */ 0,
5318 /* fixup_label */ nullptr,
5319 kCompilerReadBarrierOption);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005320 return;
5321 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005322 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005323 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005324 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005325
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005326 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005327 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005328 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005329 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005330 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
5331 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005332 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005333}
5334
Alexandre Rames5319def2014-10-23 10:03:10 +01005335void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005336 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01005337 locations->SetOut(Location::ConstantLocation(constant));
5338}
5339
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005340void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005341 // Will be generated at use site.
5342}
5343
Alexandre Rames67555f72014-11-18 10:55:16 +00005344void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005345 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5346 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005347 InvokeRuntimeCallingConvention calling_convention;
5348 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5349}
5350
5351void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01005352 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005353 instruction,
5354 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005355 if (instruction->IsEnter()) {
5356 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5357 } else {
5358 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5359 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005360 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005361}
5362
Alexandre Rames42d641b2014-10-27 14:00:51 +00005363void LocationsBuilderARM64::VisitMul(HMul* mul) {
5364 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005365 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005366 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005367 case DataType::Type::kInt32:
5368 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005369 locations->SetInAt(0, Location::RequiresRegister());
5370 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005371 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005372 break;
5373
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005374 case DataType::Type::kFloat32:
5375 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005376 locations->SetInAt(0, Location::RequiresFpuRegister());
5377 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00005378 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005379 break;
5380
5381 default:
5382 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5383 }
5384}
5385
5386void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
5387 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005388 case DataType::Type::kInt32:
5389 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005390 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
5391 break;
5392
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005393 case DataType::Type::kFloat32:
5394 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005395 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00005396 break;
5397
5398 default:
5399 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5400 }
5401}
5402
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005403void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
5404 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005405 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005406 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005407 case DataType::Type::kInt32:
5408 case DataType::Type::kInt64:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00005409 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00005410 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005411 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005412
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005413 case DataType::Type::kFloat32:
5414 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005415 locations->SetInAt(0, Location::RequiresFpuRegister());
5416 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005417 break;
5418
5419 default:
5420 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5421 }
5422}
5423
5424void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
5425 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005426 case DataType::Type::kInt32:
5427 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005428 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
5429 break;
5430
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005431 case DataType::Type::kFloat32:
5432 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005433 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005434 break;
5435
5436 default:
5437 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5438 }
5439}
5440
5441void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005442 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5443 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005444 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005445 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005446 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5447 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005448}
5449
5450void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005451 // Note: if heap poisoning is enabled, the entry point takes care
5452 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00005453 QuickEntrypointEnum entrypoint =
5454 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
5455 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005456 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005457 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005458}
5459
Alexandre Rames5319def2014-10-23 10:03:10 +01005460void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005461 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5462 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01005463 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07005464 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005465 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexandre Rames5319def2014-10-23 10:03:10 +01005466}
5467
5468void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005469 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5470 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005471 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005472}
5473
5474void LocationsBuilderARM64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005475 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005476 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005477 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005478}
5479
5480void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005481 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005482 case DataType::Type::kInt32:
5483 case DataType::Type::kInt64:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005484 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005485 break;
5486
5487 default:
5488 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5489 }
5490}
5491
David Brazdil66d126e2015-04-03 16:02:44 +01005492void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005493 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
David Brazdil66d126e2015-04-03 16:02:44 +01005494 locations->SetInAt(0, Location::RequiresRegister());
5495 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5496}
5497
5498void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
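  // Booleans are materialized as 0 or 1, so logical negation is a single XOR with 1.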
Scott Wakeling97c72b72016-06-24 16:19:36 +01005499 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005500}
5501
Alexandre Rames5319def2014-10-23 10:03:10 +01005502void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005503 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5504 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005505}
5506
Calin Juravle2ae48182016-03-16 14:05:09 +00005507void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5508 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005509 return;
5510 }
Artem Serov914d7a82017-02-07 14:33:49 +00005511 {
5512 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
5513 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5514 Location obj = instruction->GetLocations()->InAt(0);
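    // Load from the object into wzr: the value is discarded, but a null reference
    // faults here and the runtime's fault handler raises the NullPointerException
    // at the PC recorded below.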
5515 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5516 RecordPcInfo(instruction, instruction->GetDexPc());
5517 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005518}
5519
Calin Juravle2ae48182016-03-16 14:05:09 +00005520void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005521 SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005522 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005523
5524 LocationSummary* locations = instruction->GetLocations();
5525 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005526
5527 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005528}
5529
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005530void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005531 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005532}
5533
Alexandre Rames67555f72014-11-18 10:55:16 +00005534void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5535 HandleBinaryOp(instruction);
5536}
5537
5538void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5539 HandleBinaryOp(instruction);
5540}
5541
Alexandre Rames3e69f162014-12-10 10:36:50 +00005542void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5543 LOG(FATAL) << "Unreachable";
5544}
5545
5546void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005547 if (instruction->GetNext()->IsSuspendCheck() &&
5548 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5549 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5550 // The back edge will generate the suspend check.
5551 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5552 }
5553
Alexandre Rames3e69f162014-12-10 10:36:50 +00005554 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5555}
5556
Alexandre Rames5319def2014-10-23 10:03:10 +01005557void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005558 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005559 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5560 if (location.IsStackSlot()) {
5561 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5562 } else if (location.IsDoubleStackSlot()) {
5563 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5564 }
5565 locations->SetOut(location);
5566}
5567
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005568void InstructionCodeGeneratorARM64::VisitParameterValue(
5569 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005570 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005571}
5572
5573void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5574 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005575 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005576 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005577}
5578
5579void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5580 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5581 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005582}
5583
5584void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005585 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005586 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005587 locations->SetInAt(i, Location::Any());
5588 }
5589 locations->SetOut(Location::Any());
5590}
5591
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005592void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005593 LOG(FATAL) << "Unreachable";
5594}
5595
Serban Constantinescu02164b32014-11-13 14:05:07 +00005596void LocationsBuilderARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005597 DataType::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005598 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005599 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005600 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005601 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005602
5603 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005604 case DataType::Type::kInt32:
5605 case DataType::Type::kInt64:
Serban Constantinescu02164b32014-11-13 14:05:07 +00005606 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005607 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005608 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5609 break;
5610
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005611 case DataType::Type::kFloat32:
5612 case DataType::Type::kFloat64: {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005613 InvokeRuntimeCallingConvention calling_convention;
5614 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5615 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5616 locations->SetOut(calling_convention.GetReturnLocation(type));
5617
5618 break;
5619 }
5620
Serban Constantinescu02164b32014-11-13 14:05:07 +00005621 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005622 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005623 }
5624}
5625
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005626void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005627 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005628 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
5629 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
5630
5631 Register out = OutputRegister(instruction);
5632 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005633
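  // The remainder keeps the sign of the dividend. For |imm| == 2^k the magnitude is
  // dividend & (2^k - 1); for negative dividends the same mask is applied to -dividend
  // and the result negated, selected via Csneg on the flags set by Cmp/Negs below.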
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005634 if (abs_imm == 2) {
5635 __ Cmp(dividend, 0);
5636 __ And(out, dividend, 1);
5637 __ Csneg(out, out, out, ge);
5638 } else {
5639 UseScratchRegisterScope temps(GetVIXLAssembler());
5640 Register temp = temps.AcquireSameSizeAs(out);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005641
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005642 __ Negs(temp, dividend);
5643 __ And(out, dividend, abs_imm - 1);
5644 __ And(temp, temp, abs_imm - 1);
5645 __ Csneg(out, out, temp, mi);
5646 }
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005647}
5648
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005649void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005650 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005651
5652 if (imm == 0) {
5653 // Do not generate anything.
5654 // DivZeroCheck would prevent any code from being executed.
5655 return;
5656 }
5657
Evgeny Astigeevichf58dc652018-06-25 17:54:07 +01005658 if (IsPowerOfTwo(AbsOrMin(imm))) {
5659 // Cases imm == -1 or imm == 1 are handled in constant folding by
5660 // InstructionWithAbsorbingInputSimplifier.
5661 // If these cases have survived till code generation, they are handled in
5662 // GenerateIntRemForPower2Denom because -1 and 1 are powers of 2 (2^0).
5663 // The correct code is generated for them, just with more instructions.
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005664 GenerateIntRemForPower2Denom(instruction);
5665 } else {
5666 DCHECK(imm < -2 || imm > 2) << imm;
5667 GenerateDivRemWithAnyConstant(instruction);
5668 }
5669}
5670
5671void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
5672 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
5673 << instruction->GetResultType();
5674
5675 if (instruction->GetLocations()->InAt(1).IsConstant()) {
5676 GenerateIntRemForConstDenom(instruction);
5677 } else {
5678 Register out = OutputRegister(instruction);
5679 Register dividend = InputRegisterAt(instruction, 0);
5680 Register divisor = InputRegisterAt(instruction, 1);
5681 UseScratchRegisterScope temps(GetVIXLAssembler());
5682 Register temp = temps.AcquireSameSizeAs(out);
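    // temp = dividend / divisor (truncated towards zero), then
    // out = dividend - temp * divisor via Msub.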
5683 __ Sdiv(temp, dividend, divisor);
5684 __ Msub(out, temp, divisor, dividend);
5685 }
5686}
5687
Serban Constantinescu02164b32014-11-13 14:05:07 +00005688void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005689 DataType::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005690
Serban Constantinescu02164b32014-11-13 14:05:07 +00005691 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005692 case DataType::Type::kInt32:
5693 case DataType::Type::kInt64: {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005694 GenerateIntRem(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005695 break;
5696 }
5697
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005698 case DataType::Type::kFloat32:
5699 case DataType::Type::kFloat64: {
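      // ARM64 has no floating-point remainder instruction, so call the
      // fmodf/fmod runtime entrypoints.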
5700 QuickEntrypointEnum entrypoint =
5701 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005702 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005703 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00005704 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5705 } else {
5706 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5707 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005708 break;
5709 }
5710
Serban Constantinescu02164b32014-11-13 14:05:07 +00005711 default:
5712 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005713 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005714 }
5715}
5716
Aart Bik1f8d51b2018-02-15 10:42:37 -08005717void LocationsBuilderARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005718 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005719}
5720
Aart Bik1f8d51b2018-02-15 10:42:37 -08005721void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005722 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005723}
5724
5725void LocationsBuilderARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005726 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005727}
5728
5729void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005730 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005731}
5732
Aart Bik3dad3412018-02-28 12:01:46 -08005733void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
5734 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
5735 switch (abs->GetResultType()) {
5736 case DataType::Type::kInt32:
5737 case DataType::Type::kInt64:
5738 locations->SetInAt(0, Location::RequiresRegister());
5739 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5740 break;
5741 case DataType::Type::kFloat32:
5742 case DataType::Type::kFloat64:
5743 locations->SetInAt(0, Location::RequiresFpuRegister());
5744 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5745 break;
5746 default:
5747 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5748 }
5749}
5750
5751void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
5752 switch (abs->GetResultType()) {
5753 case DataType::Type::kInt32:
5754 case DataType::Type::kInt64: {
5755 Register in_reg = InputRegisterAt(abs, 0);
5756 Register out_reg = OutputRegister(abs);
5757 __ Cmp(in_reg, Operand(0));
5758 __ Cneg(out_reg, in_reg, lt);
5759 break;
5760 }
5761 case DataType::Type::kFloat32:
5762 case DataType::Type::kFloat64: {
5763 FPRegister in_reg = InputFPRegisterAt(abs, 0);
5764 FPRegister out_reg = OutputFPRegister(abs);
5765 __ Fabs(out_reg, in_reg);
5766 break;
5767 }
5768 default:
5769 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5770 }
5771}
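// Sketch of the code generated above (register names are illustrative only):
//
//   // Integer abs: conditional negation; like Java's Math.abs, abs(INT_MIN) wraps to INT_MIN.
//   cmp  w0, #0
//   cneg w0, w0, lt          // out = (in < 0) ? -in : in
//
//   // Floating-point abs: a single instruction that clears the sign bit.
//   fabs d0, d0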
5772
Igor Murashkind01745e2017-04-05 16:40:31 -07005773void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
5774 constructor_fence->SetLocations(nullptr);
5775}
5776
5777void InstructionCodeGeneratorARM64::VisitConstructorFence(
5778 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
5779 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
5780}
5781
Calin Juravle27df7582015-04-17 19:12:31 +01005782void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5783 memory_barrier->SetLocations(nullptr);
5784}
5785
5786void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005787 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005788}
5789
Alexandre Rames5319def2014-10-23 10:03:10 +01005790void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005791 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005792 DataType::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005793 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005794}
5795
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005796void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005797 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005798}
5799
5800void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5801 instruction->SetLocations(nullptr);
5802}
5803
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005804void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005805 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005806}
5807
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005808void LocationsBuilderARM64::VisitRor(HRor* ror) {
5809 HandleBinaryOp(ror);
5810}
5811
5812void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5813 HandleBinaryOp(ror);
5814}
5815
Serban Constantinescu02164b32014-11-13 14:05:07 +00005816void LocationsBuilderARM64::VisitShl(HShl* shl) {
5817 HandleShift(shl);
5818}
5819
5820void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5821 HandleShift(shl);
5822}
5823
5824void LocationsBuilderARM64::VisitShr(HShr* shr) {
5825 HandleShift(shr);
5826}
5827
5828void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5829 HandleShift(shr);
5830}
5831
Alexandre Rames5319def2014-10-23 10:03:10 +01005832void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005833 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005834}
5835
5836void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005837 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005838}
5839
Alexandre Rames67555f72014-11-18 10:55:16 +00005840void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005841 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005842}
5843
5844void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005845 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005846}
5847
5848void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005849 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005850}
5851
Alexandre Rames67555f72014-11-18 10:55:16 +00005852void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005853 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005854}
5855
Calin Juravlee460d1d2015-09-29 04:52:17 +01005856void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5857 HUnresolvedInstanceFieldGet* instruction) {
5858 FieldAccessCallingConventionARM64 calling_convention;
5859 codegen_->CreateUnresolvedFieldLocationSummary(
5860 instruction, instruction->GetFieldType(), calling_convention);
5861}
5862
5863void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5864 HUnresolvedInstanceFieldGet* instruction) {
5865 FieldAccessCallingConventionARM64 calling_convention;
5866 codegen_->GenerateUnresolvedFieldAccess(instruction,
5867 instruction->GetFieldType(),
5868 instruction->GetFieldIndex(),
5869 instruction->GetDexPc(),
5870 calling_convention);
5871}
5872
5873void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5874 HUnresolvedInstanceFieldSet* instruction) {
5875 FieldAccessCallingConventionARM64 calling_convention;
5876 codegen_->CreateUnresolvedFieldLocationSummary(
5877 instruction, instruction->GetFieldType(), calling_convention);
5878}
5879
5880void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5881 HUnresolvedInstanceFieldSet* instruction) {
5882 FieldAccessCallingConventionARM64 calling_convention;
5883 codegen_->GenerateUnresolvedFieldAccess(instruction,
5884 instruction->GetFieldType(),
5885 instruction->GetFieldIndex(),
5886 instruction->GetDexPc(),
5887 calling_convention);
5888}
5889
5890void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5891 HUnresolvedStaticFieldGet* instruction) {
5892 FieldAccessCallingConventionARM64 calling_convention;
5893 codegen_->CreateUnresolvedFieldLocationSummary(
5894 instruction, instruction->GetFieldType(), calling_convention);
5895}
5896
5897void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5898 HUnresolvedStaticFieldGet* instruction) {
5899 FieldAccessCallingConventionARM64 calling_convention;
5900 codegen_->GenerateUnresolvedFieldAccess(instruction,
5901 instruction->GetFieldType(),
5902 instruction->GetFieldIndex(),
5903 instruction->GetDexPc(),
5904 calling_convention);
5905}
5906
5907void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5908 HUnresolvedStaticFieldSet* instruction) {
5909 FieldAccessCallingConventionARM64 calling_convention;
5910 codegen_->CreateUnresolvedFieldLocationSummary(
5911 instruction, instruction->GetFieldType(), calling_convention);
5912}
5913
5914void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5915 HUnresolvedStaticFieldSet* instruction) {
5916 FieldAccessCallingConventionARM64 calling_convention;
5917 codegen_->GenerateUnresolvedFieldAccess(instruction,
5918 instruction->GetFieldType(),
5919 instruction->GetFieldIndex(),
5920 instruction->GetDexPc(),
5921 calling_convention);
5922}
5923
Alexandre Rames5319def2014-10-23 10:03:10 +01005924void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005925 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5926 instruction, LocationSummary::kCallOnSlowPath);
Artem Serov7957d952017-04-04 15:44:09 +01005927  // In the suspend check slow path, there are usually no caller-save registers at all.
 5928  // If SIMD instructions are present, however, we force spilling all live SIMD
 5929  // registers in full width (since the runtime only saves/restores the lower part).
5930 locations->SetCustomSlowPathCallerSaves(
5931 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexandre Rames5319def2014-10-23 10:03:10 +01005932}
5933
5934void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005935 HBasicBlock* block = instruction->GetBlock();
5936 if (block->GetLoopInformation() != nullptr) {
5937 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5938 // The back edge will generate the suspend check.
5939 return;
5940 }
5941 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5942 // The goto will generate the suspend check.
5943 return;
5944 }
5945 GenerateSuspendCheck(instruction, nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005946 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005947}
5948
Alexandre Rames67555f72014-11-18 10:55:16 +00005949void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005950 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5951 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005952 InvokeRuntimeCallingConvention calling_convention;
5953 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5954}
5955
5956void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005957 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005958 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005959}
5960
5961void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
5962 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005963 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005964 DataType::Type input_type = conversion->GetInputType();
5965 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005966 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
5967 << input_type << " -> " << result_type;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005968 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
5969 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005970 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5971 }
5972
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005973 if (DataType::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005974 locations->SetInAt(0, Location::RequiresFpuRegister());
5975 } else {
5976 locations->SetInAt(0, Location::RequiresRegister());
5977 }
5978
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005979 if (DataType::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005980 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5981 } else {
5982 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5983 }
5984}
5985
5986void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005987 DataType::Type result_type = conversion->GetResultType();
5988 DataType::Type input_type = conversion->GetInputType();
Alexandre Rames67555f72014-11-18 10:55:16 +00005989
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005990 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
5991 << input_type << " -> " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005992
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005993 if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
5994 int result_size = DataType::Size(result_type);
5995 int input_size = DataType::Size(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00005996 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005997 Register output = OutputRegister(conversion);
5998 Register source = InputRegisterAt(conversion, 0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005999 if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01006000 // 'int' values are used directly as W registers, discarding the top
6001 // bits, so we don't need to sign-extend and can just perform a move.
6002 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
6003 // top 32 bits of the target register. We theoretically could leave those
6004 // bits unchanged, but we would have to make sure that no code uses a
6005 // 32bit input value as a 64bit value assuming that the top 32 bits are
6006 // zero.
6007 __ Mov(output.W(), source.W());
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006008 } else if (DataType::IsUnsignedType(result_type) ||
6009 (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
6010 __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00006011 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00006012 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00006013 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006014 } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00006015 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006016 } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
6017 CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
Serban Constantinescu02164b32014-11-13 14:05:07 +00006018 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006019 } else if (DataType::IsFloatingPointType(result_type) &&
6020 DataType::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00006021 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
6022 } else {
6023 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
6024 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00006025 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00006026}
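// Rough summary of the instruction selection above, with illustrative register assignments
// (w0/x0 = output, w1/d1/s1 = input):
//
//   int64  -> int32 : mov    w0, w1            // truncation via the W view of the register
//   int32  -> int16 : sbfx   w0, w1, #0, #16   // sign-extend the low bits (signed narrowing)
//   int32  -> uint16: ubfx   w0, w1, #0, #16   // zero-extend (e.g. char)
//   int32  -> double: scvtf  d0, w1
//   double -> int64 : fcvtzs x0, d1            // rounds toward zero, saturates on overflow, NaN -> 0
//   float  -> double: fcvt   d0, s1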
Alexandre Rames67555f72014-11-18 10:55:16 +00006027
Serban Constantinescu02164b32014-11-13 14:05:07 +00006028void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
6029 HandleShift(ushr);
6030}
6031
6032void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
6033 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00006034}
6035
6036void LocationsBuilderARM64::VisitXor(HXor* instruction) {
6037 HandleBinaryOp(instruction);
6038}
6039
6040void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
6041 HandleBinaryOp(instruction);
6042}
6043
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006044void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006045 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006046 LOG(FATAL) << "Unreachable";
6047}
6048
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006049void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006050 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006051 LOG(FATAL) << "Unreachable";
6052}
6053
Mark Mendellfe57faa2015-09-18 09:26:15 -04006054// Simple implementation of packed switch - generate cascaded compare/jumps.
6055void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6056 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006057 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006058 locations->SetInAt(0, Location::RequiresRegister());
6059}
6060
6061void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6062 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08006063 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006064 Register value_reg = InputRegisterAt(switch_instr, 0);
6065 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6066
Zheng Xu3927c8b2015-11-18 17:46:25 +08006067  // Roughly estimate a maximum of 16 assembly instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01006068 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08006069  // ADR has a limited range (+/- 1 MB), so we set a threshold on the number of HIRs in the graph
 6070  // to make sure we do not emit a jump table whose target may be out of range.
6071 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
6072 // ranges and emit the tables only as required.
 6073  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
Mark Mendellfe57faa2015-09-18 09:26:15 -04006074
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006075 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08006076 // Current instruction id is an upper bound of the number of HIRs in the graph.
6077 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
6078 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006079 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
6080 Register temp = temps.AcquireW();
6081 __ Subs(temp, value_reg, Operand(lower_bound));
6082
Zheng Xu3927c8b2015-11-18 17:46:25 +08006083 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006084 // Jump to successors[0] if value == lower_bound.
6085 __ B(eq, codegen_->GetLabelOf(successors[0]));
6086 int32_t last_index = 0;
6087 for (; num_entries - last_index > 2; last_index += 2) {
6088 __ Subs(temp, temp, Operand(2));
6089 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
6090 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
6091 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
6092 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
6093 }
6094 if (num_entries - last_index == 2) {
6095 // The last missing case_value.
6096 __ Cmp(temp, Operand(1));
6097 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08006098 }
6099
6100 // And the default for any other value.
6101 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6102 __ B(codegen_->GetLabelOf(default_block));
6103 }
6104 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01006105 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08006106
6107 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
6108
 6109    // The instructions below need at most one extra blocked (scratch) register internally. Since
 6110    // there are two blocked registers available, we are free to acquire one of them here.
6111 Register temp_w = temps.AcquireW();
6112 Register index;
6113 // Remove the bias.
6114 if (lower_bound != 0) {
6115 index = temp_w;
6116 __ Sub(index, value_reg, Operand(lower_bound));
6117 } else {
6118 index = value_reg;
6119 }
6120
6121 // Jump to default block if index is out of the range.
6122 __ Cmp(index, Operand(num_entries));
6123 __ B(hs, codegen_->GetLabelOf(default_block));
6124
 6125    // The current VIXL implementation does not require any blocked registers to encode the
 6126    // immediate value for Adr, so we are free to use both VIXL blocked registers here to
 6127    // reduce register pressure.
6128 Register table_base = temps.AcquireX();
6129 // Load jump offset from the table.
6130 __ Adr(table_base, jump_table->GetTableStartLabel());
6131 Register jump_offset = temp_w;
6132 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
6133
6134 // Jump to target block by branching to table_base(pc related) + offset.
6135 Register target_address = table_base;
6136 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
6137 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006138 }
6139}
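// Sketch of the jump-table dispatch emitted in the `else` branch above (x16/x17 stand in for the
// VIXL scratch registers; the exact registers are chosen at code-generation time):
//
//   sub  w16, w_value, #lower_bound     // remove the bias (skipped when lower_bound == 0)
//   cmp  w16, #num_entries
//   b.hs default_block                  // out-of-range values go to the default block
//   adr  x17, jump_table
//   ldr  w16, [x17, w16, uxtw #2]       // load the 32-bit offset for this case
//   add  x17, x17, w16, sxtw            // target = table base + offset
//   br   x17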
6140
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006141void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
6142 HInstruction* instruction,
6143 Location out,
6144 uint32_t offset,
6145 Location maybe_temp,
6146 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006147 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006148 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006149 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006150 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006151 if (kUseBakerReadBarrier) {
6152 // Load with fast path based Baker's read barrier.
6153 // /* HeapReference<Object> */ out = *(out + offset)
6154 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6155 out,
6156 out_reg,
6157 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006158 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006159 /* needs_null_check */ false,
6160 /* use_load_acquire */ false);
6161 } else {
6162 // Load with slow path based read barrier.
6163 // Save the value of `out` into `maybe_temp` before overwriting it
6164 // in the following move operation, as we will need it for the
6165 // read barrier below.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006166 Register temp_reg = RegisterFrom(maybe_temp, type);
Roland Levillain44015862016-01-22 11:47:17 +00006167 __ Mov(temp_reg, out_reg);
6168 // /* HeapReference<Object> */ out = *(out + offset)
6169 __ Ldr(out_reg, HeapOperand(out_reg, offset));
6170 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6171 }
6172 } else {
6173 // Plain load with no read barrier.
6174 // /* HeapReference<Object> */ out = *(out + offset)
6175 __ Ldr(out_reg, HeapOperand(out_reg, offset));
6176 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
6177 }
6178}
6179
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006180void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
6181 HInstruction* instruction,
6182 Location out,
6183 Location obj,
6184 uint32_t offset,
6185 Location maybe_temp,
6186 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006187 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006188 Register out_reg = RegisterFrom(out, type);
6189 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006190 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006191 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006192 if (kUseBakerReadBarrier) {
6193 // Load with fast path based Baker's read barrier.
Roland Levillain44015862016-01-22 11:47:17 +00006194 // /* HeapReference<Object> */ out = *(obj + offset)
6195 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6196 out,
6197 obj_reg,
6198 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006199 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006200 /* needs_null_check */ false,
6201 /* use_load_acquire */ false);
6202 } else {
6203 // Load with slow path based read barrier.
6204 // /* HeapReference<Object> */ out = *(obj + offset)
6205 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
6206 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6207 }
6208 } else {
6209 // Plain load with no read barrier.
6210 // /* HeapReference<Object> */ out = *(obj + offset)
6211 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
6212 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
6213 }
6214}
6215
Vladimir Markoca1e0382018-04-11 09:58:41 +00006216void CodeGeneratorARM64::GenerateGcRootFieldLoad(
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006217 HInstruction* instruction,
6218 Location root,
6219 Register obj,
6220 uint32_t offset,
6221 vixl::aarch64::Label* fixup_label,
6222 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00006223 DCHECK(fixup_label == nullptr || offset == 0u);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006224 Register root_reg = RegisterFrom(root, DataType::Type::kReference);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006225 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006226 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006227 if (kUseBakerReadBarrier) {
6228 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00006229      // Baker's read barriers are used.
Roland Levillain44015862016-01-22 11:47:17 +00006230
Vladimir Marko008e09f32018-08-06 15:42:43 +01006231 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
6232 // the Marking Register) to decide whether we need to enter
6233 // the slow path to mark the GC root.
6234 //
6235 // We use shared thunks for the slow path; shared within the method
6236 // for JIT, across methods for AOT. That thunk checks the reference
6237 // and jumps to the entrypoint if needed.
6238 //
6239 // lr = &return_address;
6240 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
6241 // if (mr) { // Thread::Current()->GetIsGcMarking()
6242 // goto gc_root_thunk<root_reg>(lr)
6243 // }
6244 // return_address:
Roland Levillainba650a42017-03-06 13:52:32 +00006245
Vladimir Marko008e09f32018-08-06 15:42:43 +01006246 UseScratchRegisterScope temps(GetVIXLAssembler());
6247 DCHECK(temps.IsAvailable(ip0));
6248 DCHECK(temps.IsAvailable(ip1));
6249 temps.Exclude(ip0, ip1);
6250 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());
Roland Levillain44015862016-01-22 11:47:17 +00006251
Vladimir Marko008e09f32018-08-06 15:42:43 +01006252 ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
6253 vixl::aarch64::Label return_address;
6254 __ adr(lr, &return_address);
6255 if (fixup_label != nullptr) {
6256 __ bind(fixup_label);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006257 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01006258 static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
 6259                    "GC root LDR must be 2 instructions (8B) before the return address label.");
6260 __ ldr(root_reg, MemOperand(obj.X(), offset));
6261 EmitBakerReadBarrierCbnz(custom_data);
6262 __ bind(&return_address);
Roland Levillain44015862016-01-22 11:47:17 +00006263 } else {
6264 // GC root loaded through a slow path for read barriers other
6265 // than Baker's.
6266 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006267 if (fixup_label == nullptr) {
6268 __ Add(root_reg.X(), obj.X(), offset);
6269 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006270 EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006271 }
Roland Levillain44015862016-01-22 11:47:17 +00006272 // /* mirror::Object* */ root = root->Read()
Vladimir Markoca1e0382018-04-11 09:58:41 +00006273 GenerateReadBarrierForRootSlow(instruction, root, root);
Roland Levillain44015862016-01-22 11:47:17 +00006274 }
6275 } else {
6276 // Plain GC root load with no read barrier.
6277 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006278 if (fixup_label == nullptr) {
6279 __ Ldr(root_reg, MemOperand(obj, offset));
6280 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006281 EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006282 }
Roland Levillain44015862016-01-22 11:47:17 +00006283 // Note that GC roots are not affected by heap poisoning, thus we
6284 // do not have to unpoison `root_reg` here.
6285 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006286 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillain44015862016-01-22 11:47:17 +00006287}
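// For reference, the Baker fast path above fits in the three-instruction ExactAssemblyScope
// roughly as follows (a sketch; the CBNZ target is the per-register thunk selected via
// `custom_data`):
//
//   adr  lr, return_address
//   ldr  wRoot, [xObj, #offset]     // original GC root load
//   cbnz mr, gc_root_thunk          // marking? -> let the thunk mark the just-loaded root
// return_address: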
6288
6289void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6290 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006291 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006292 uint32_t offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006293 Location maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006294 bool needs_null_check,
6295 bool use_load_acquire) {
6296 DCHECK(kEmitCompilerReadBarrier);
6297 DCHECK(kUseBakerReadBarrier);
6298
Vladimir Marko0ecac682018-08-07 10:40:38 +01006299 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6300 // Marking Register) to decide whether we need to enter the slow
6301 // path to mark the reference. Then, in the slow path, check the
6302 // gray bit in the lock word of the reference's holder (`obj`) to
6303 // decide whether to mark `ref` or not.
6304 //
6305 // We use shared thunks for the slow path; shared within the method
6306 // for JIT, across methods for AOT. That thunk checks the holder
6307 // and jumps to the entrypoint if needed. If the holder is not gray,
6308 // it creates a fake dependency and returns to the LDR instruction.
6309 //
6310 // lr = &gray_return_address;
6311 // if (mr) { // Thread::Current()->GetIsGcMarking()
6312 // goto field_thunk<holder_reg, base_reg, use_load_acquire>(lr)
6313 // }
6314 // not_gray_return_address:
6315 // // Original reference load. If the offset is too large to fit
6316 // // into LDR, we use an adjusted base register here.
6317 // HeapReference<mirror::Object> reference = *(obj+offset);
6318 // gray_return_address:
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006319
Vladimir Marko0ecac682018-08-07 10:40:38 +01006320 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
6321 Register base = obj;
6322 if (use_load_acquire) {
6323 DCHECK(maybe_temp.IsRegister());
6324 base = WRegisterFrom(maybe_temp);
6325 __ Add(base, obj, offset);
6326 offset = 0u;
6327 } else if (offset >= kReferenceLoadMinFarOffset) {
6328 DCHECK(maybe_temp.IsRegister());
6329 base = WRegisterFrom(maybe_temp);
6330 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
6331 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
6332 offset &= (kReferenceLoadMinFarOffset - 1u);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006333 }
Vladimir Marko0ecac682018-08-07 10:40:38 +01006334 UseScratchRegisterScope temps(GetVIXLAssembler());
6335 DCHECK(temps.IsAvailable(ip0));
6336 DCHECK(temps.IsAvailable(ip1));
6337 temps.Exclude(ip0, ip1);
6338 uint32_t custom_data = use_load_acquire
6339 ? EncodeBakerReadBarrierAcquireData(base.GetCode(), obj.GetCode())
6340 : EncodeBakerReadBarrierFieldData(base.GetCode(), obj.GetCode());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006341
Vladimir Marko0ecac682018-08-07 10:40:38 +01006342 {
6343 ExactAssemblyScope guard(GetVIXLAssembler(),
6344 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6345 vixl::aarch64::Label return_address;
6346 __ adr(lr, &return_address);
6347 EmitBakerReadBarrierCbnz(custom_data);
6348 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6349 "Field LDR must be 1 instruction (4B) before the return address label; "
 6350                  "2 instructions (8B) for heap poisoning.");
6351 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
6352 if (use_load_acquire) {
6353 DCHECK_EQ(offset, 0u);
6354 __ ldar(ref_reg, MemOperand(base.X()));
6355 } else {
6356 __ ldr(ref_reg, MemOperand(base.X(), offset));
6357 }
6358 if (needs_null_check) {
6359 MaybeRecordImplicitNullCheck(instruction);
6360 }
6361 // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
6362 // macro instructions disallowed in ExactAssemblyScope.
6363 if (kPoisonHeapReferences) {
6364 __ neg(ref_reg, Operand(ref_reg));
6365 }
6366 __ bind(&return_address);
6367 }
6368 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
Roland Levillain44015862016-01-22 11:47:17 +00006369}
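// Worked example of the far-offset split above, assuming kReferenceLoadMinFarOffset is 0x4000
// (the constant is defined elsewhere in this file; 0x4000 is used here purely for illustration):
// for offset = 0x4008, the code materializes base = obj + 0x4000 and the reference load then
// uses the small residual immediate, keeping the thunk-recognizable single-LDR pattern:
//
//   add base, obj, #0x4000
//   ldr wRef, [base, #0x8]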
6370
Vladimir Marko008e09f32018-08-06 15:42:43 +01006371void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006372 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006373 uint32_t data_offset,
6374 Location index,
6375 Register temp,
6376 bool needs_null_check) {
6377 DCHECK(kEmitCompilerReadBarrier);
6378 DCHECK(kUseBakerReadBarrier);
6379
Vladimir Marko66d691d2017-04-07 17:53:39 +01006380 static_assert(
6381 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6382 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006383 size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006384
Vladimir Marko008e09f32018-08-06 15:42:43 +01006385 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6386 // Marking Register) to decide whether we need to enter the slow
6387 // path to mark the reference. Then, in the slow path, check the
6388 // gray bit in the lock word of the reference's holder (`obj`) to
6389 // decide whether to mark `ref` or not.
6390 //
6391 // We use shared thunks for the slow path; shared within the method
6392 // for JIT, across methods for AOT. That thunk checks the holder
6393 // and jumps to the entrypoint if needed. If the holder is not gray,
6394 // it creates a fake dependency and returns to the LDR instruction.
6395 //
6396 // lr = &gray_return_address;
6397 // if (mr) { // Thread::Current()->GetIsGcMarking()
6398 // goto array_thunk<base_reg>(lr)
6399 // }
6400 // not_gray_return_address:
6401 // // Original reference load. If the offset is too large to fit
6402 // // into LDR, we use an adjusted base register here.
6403 // HeapReference<mirror::Object> reference = data[index];
6404 // gray_return_address:
Vladimir Marko66d691d2017-04-07 17:53:39 +01006405
Vladimir Marko008e09f32018-08-06 15:42:43 +01006406 DCHECK(index.IsValid());
6407 Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
6408 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006409
Vladimir Marko008e09f32018-08-06 15:42:43 +01006410 UseScratchRegisterScope temps(GetVIXLAssembler());
6411 DCHECK(temps.IsAvailable(ip0));
6412 DCHECK(temps.IsAvailable(ip1));
6413 temps.Exclude(ip0, ip1);
6414 uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());
Vladimir Marko66d691d2017-04-07 17:53:39 +01006415
Vladimir Marko008e09f32018-08-06 15:42:43 +01006416 __ Add(temp.X(), obj.X(), Operand(data_offset));
6417 {
6418 ExactAssemblyScope guard(GetVIXLAssembler(),
6419 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6420 vixl::aarch64::Label return_address;
6421 __ adr(lr, &return_address);
6422 EmitBakerReadBarrierCbnz(custom_data);
6423 static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6424 "Array LDR must be 1 instruction (4B) before the return address label; "
 6425                  "2 instructions (8B) for heap poisoning.");
6426 __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
6427 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
6428 // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
6429 // macro instructions disallowed in ExactAssemblyScope.
6430 if (kPoisonHeapReferences) {
6431 __ neg(ref_reg, Operand(ref_reg));
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006432 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01006433 __ bind(&return_address);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006434 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01006435 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
Roland Levillain44015862016-01-22 11:47:17 +00006436}
6437
6438void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6439 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006440 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006441 uint32_t offset,
6442 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01006443 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00006444 Register temp,
6445 bool needs_null_check,
Roland Levillainff487002017-03-07 16:50:01 +00006446 bool use_load_acquire) {
Roland Levillain44015862016-01-22 11:47:17 +00006447 DCHECK(kEmitCompilerReadBarrier);
6448 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01006449 // If we are emitting an array load, we should not be using a
6450 // Load Acquire instruction. In other words:
6451 // `instruction->IsArrayGet()` => `!use_load_acquire`.
6452 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006453
Roland Levillain97c46462017-05-11 14:04:03 +01006454 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6455 // Marking Register) to decide whether we need to enter the slow
6456 // path to mark the reference. Then, in the slow path, check the
6457 // gray bit in the lock word of the reference's holder (`obj`) to
6458 // decide whether to mark `ref` or not.
Roland Levillain44015862016-01-22 11:47:17 +00006459 //
Roland Levillain97c46462017-05-11 14:04:03 +01006460 // if (mr) { // Thread::Current()->GetIsGcMarking()
Roland Levillainba650a42017-03-06 13:52:32 +00006461 // // Slow path.
Roland Levillain54f869e2017-03-06 13:54:11 +00006462 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6463 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6464 // HeapReference<mirror::Object> ref = *src; // Original reference load.
6465 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6466 // if (is_gray) {
Roland Levillain97c46462017-05-11 14:04:03 +01006467  //       entrypoint = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
6468 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
Roland Levillain54f869e2017-03-06 13:54:11 +00006469 // }
6470 // } else {
6471 // HeapReference<mirror::Object> ref = *src; // Original reference load.
Roland Levillain44015862016-01-22 11:47:17 +00006472 // }
Roland Levillain44015862016-01-22 11:47:17 +00006473
Roland Levillainba650a42017-03-06 13:52:32 +00006474 // Slow path marking the object `ref` when the GC is marking. The
Roland Levillain97c46462017-05-11 14:04:03 +01006475 // entrypoint will be loaded by the slow path code.
Roland Levillainff487002017-03-07 16:50:01 +00006476 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006477 new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierSlowPathARM64(
Roland Levillainff487002017-03-07 16:50:01 +00006478 instruction,
6479 ref,
6480 obj,
6481 offset,
6482 index,
6483 scale_factor,
6484 needs_null_check,
6485 use_load_acquire,
Roland Levillain97c46462017-05-11 14:04:03 +01006486 temp);
Roland Levillainba650a42017-03-06 13:52:32 +00006487 AddSlowPath(slow_path);
6488
Roland Levillain97c46462017-05-11 14:04:03 +01006489 __ Cbnz(mr, slow_path->GetEntryLabel());
Roland Levillainff487002017-03-07 16:50:01 +00006490 // Fast path: the GC is not marking: just load the reference.
Roland Levillain54f869e2017-03-06 13:54:11 +00006491 GenerateRawReferenceLoad(
6492 instruction, ref, obj, offset, index, scale_factor, needs_null_check, use_load_acquire);
Roland Levillainba650a42017-03-06 13:52:32 +00006493 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006494 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillainba650a42017-03-06 13:52:32 +00006495}
6496
Roland Levillainff487002017-03-07 16:50:01 +00006497void CodeGeneratorARM64::UpdateReferenceFieldWithBakerReadBarrier(HInstruction* instruction,
6498 Location ref,
6499 Register obj,
6500 Location field_offset,
6501 Register temp,
6502 bool needs_null_check,
6503 bool use_load_acquire) {
6504 DCHECK(kEmitCompilerReadBarrier);
6505 DCHECK(kUseBakerReadBarrier);
6506 // If we are emitting an array load, we should not be using a
6507 // Load Acquire instruction. In other words:
6508 // `instruction->IsArrayGet()` => `!use_load_acquire`.
6509 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
6510
Roland Levillain97c46462017-05-11 14:04:03 +01006511 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6512 // Marking Register) to decide whether we need to enter the slow
6513 // path to update the reference field within `obj`. Then, in the
6514 // slow path, check the gray bit in the lock word of the reference's
6515 // holder (`obj`) to decide whether to mark `ref` and update the
6516 // field or not.
Roland Levillainff487002017-03-07 16:50:01 +00006517 //
Roland Levillain97c46462017-05-11 14:04:03 +01006518 // if (mr) { // Thread::Current()->GetIsGcMarking()
Roland Levillainff487002017-03-07 16:50:01 +00006519 // // Slow path.
6520 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6521 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6522 // HeapReference<mirror::Object> ref = *(obj + field_offset); // Reference load.
6523 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6524 // if (is_gray) {
6525 // old_ref = ref;
Roland Levillain97c46462017-05-11 14:04:03 +01006526  //       entrypoint = Thread::Current()->pReadBarrierMarkReg ## ref.reg()
6527 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
Roland Levillainff487002017-03-07 16:50:01 +00006528 // compareAndSwapObject(obj, field_offset, old_ref, ref);
6529 // }
6530 // }
6531
6532 // Slow path updating the object reference at address `obj + field_offset`
Roland Levillain97c46462017-05-11 14:04:03 +01006533 // when the GC is marking. The entrypoint will be loaded by the slow path code.
Roland Levillainff487002017-03-07 16:50:01 +00006534 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006535 new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
Roland Levillainff487002017-03-07 16:50:01 +00006536 instruction,
6537 ref,
6538 obj,
6539 /* offset */ 0u,
6540 /* index */ field_offset,
6541 /* scale_factor */ 0u /* "times 1" */,
6542 needs_null_check,
6543 use_load_acquire,
Roland Levillain97c46462017-05-11 14:04:03 +01006544 temp);
Roland Levillainff487002017-03-07 16:50:01 +00006545 AddSlowPath(slow_path);
6546
Roland Levillain97c46462017-05-11 14:04:03 +01006547 __ Cbnz(mr, slow_path->GetEntryLabel());
Roland Levillainff487002017-03-07 16:50:01 +00006548 // Fast path: the GC is not marking: nothing to do (the field is
6549 // up-to-date, and we don't need to load the reference).
6550 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006551 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillainff487002017-03-07 16:50:01 +00006552}
6553
Roland Levillainba650a42017-03-06 13:52:32 +00006554void CodeGeneratorARM64::GenerateRawReferenceLoad(HInstruction* instruction,
6555 Location ref,
6556 Register obj,
6557 uint32_t offset,
6558 Location index,
6559 size_t scale_factor,
6560 bool needs_null_check,
6561 bool use_load_acquire) {
6562 DCHECK(obj.IsW());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006563 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006564 Register ref_reg = RegisterFrom(ref, type);
Roland Levillain44015862016-01-22 11:47:17 +00006565
Roland Levillainba650a42017-03-06 13:52:32 +00006566 // If needed, vixl::EmissionCheckScope guards are used to ensure
6567 // that no pools are emitted between the load (macro) instruction
6568 // and MaybeRecordImplicitNullCheck.
Roland Levillain44015862016-01-22 11:47:17 +00006569
Roland Levillain44015862016-01-22 11:47:17 +00006570 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006571 // Load types involving an "index": ArrayGet,
6572 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6573 // intrinsics.
Roland Levillainbfea3352016-06-23 13:48:47 +01006574 if (use_load_acquire) {
6575 // UnsafeGetObjectVolatile intrinsic case.
6576 // Register `index` is not an index in an object array, but an
6577 // offset to an object reference field within object `obj`.
6578 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
6579 DCHECK(instruction->GetLocations()->Intrinsified());
6580 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
6581 << instruction->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006582 DCHECK_EQ(offset, 0u);
6583 DCHECK_EQ(scale_factor, 0u);
Roland Levillainba650a42017-03-06 13:52:32 +00006584 DCHECK_EQ(needs_null_check, false);
6585 // /* HeapReference<mirror::Object> */ ref = *(obj + index)
Roland Levillainbfea3352016-06-23 13:48:47 +01006586 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
6587 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00006588 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006589 // ArrayGet and UnsafeGetObject and UnsafeCASObject intrinsics cases.
6590 // /* HeapReference<mirror::Object> */ ref = *(obj + offset + (index << scale_factor))
Roland Levillainbfea3352016-06-23 13:48:47 +01006591 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01006592 uint32_t computed_offset = offset + (Int64FromLocation(index) << scale_factor);
Roland Levillainba650a42017-03-06 13:52:32 +00006593 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillainbfea3352016-06-23 13:48:47 +01006594 Load(type, ref_reg, HeapOperand(obj, computed_offset));
Roland Levillainba650a42017-03-06 13:52:32 +00006595 if (needs_null_check) {
6596 MaybeRecordImplicitNullCheck(instruction);
6597 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006598 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006599 UseScratchRegisterScope temps(GetVIXLAssembler());
6600 Register temp = temps.AcquireW();
6601 __ Add(temp, obj, offset);
6602 {
6603 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
6604 Load(type, ref_reg, HeapOperand(temp, XRegisterFrom(index), LSL, scale_factor));
6605 if (needs_null_check) {
6606 MaybeRecordImplicitNullCheck(instruction);
6607 }
6608 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006609 }
Roland Levillain44015862016-01-22 11:47:17 +00006610 }
Roland Levillain44015862016-01-22 11:47:17 +00006611 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006612 // /* HeapReference<mirror::Object> */ ref = *(obj + offset)
Roland Levillain44015862016-01-22 11:47:17 +00006613 MemOperand field = HeapOperand(obj, offset);
6614 if (use_load_acquire) {
Roland Levillainba650a42017-03-06 13:52:32 +00006615 // Implicit null checks are handled by CodeGeneratorARM64::LoadAcquire.
6616 LoadAcquire(instruction, ref_reg, field, needs_null_check);
Roland Levillain44015862016-01-22 11:47:17 +00006617 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006618 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain44015862016-01-22 11:47:17 +00006619 Load(type, ref_reg, field);
Roland Levillainba650a42017-03-06 13:52:32 +00006620 if (needs_null_check) {
6621 MaybeRecordImplicitNullCheck(instruction);
6622 }
Roland Levillain44015862016-01-22 11:47:17 +00006623 }
6624 }
6625
6626 // Object* ref = ref_addr->AsMirrorPtr()
6627 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Roland Levillain44015862016-01-22 11:47:17 +00006628}
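// Small arithmetic example for the constant-index case above (values are illustrative):
// with offset = 0x10, a constant index of 5 and scale_factor = 2 (32-bit heap references),
// computed_offset = 0x10 + (5 << 2) = 0x24, so the access is a single `ldr wRef, [xObj, #0x24]`.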
6629
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006630void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
6631 // The following condition is a compile-time one, so it does not have a run-time cost.
6632 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
6633 // The following condition is a run-time one; it is executed after the
6634 // previous compile-time test, to avoid penalizing non-debug builds.
6635 if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
6636 UseScratchRegisterScope temps(GetVIXLAssembler());
6637 Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
6638 GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
6639 }
6640 }
6641}
6642
Roland Levillain44015862016-01-22 11:47:17 +00006643void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
6644 Location out,
6645 Location ref,
6646 Location obj,
6647 uint32_t offset,
6648 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006649 DCHECK(kEmitCompilerReadBarrier);
6650
Roland Levillain44015862016-01-22 11:47:17 +00006651 // Insert a slow path based read barrier *after* the reference load.
6652 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006653 // If heap poisoning is enabled, the unpoisoning of the loaded
6654 // reference will be carried out by the runtime within the slow
6655 // path.
6656 //
6657 // Note that `ref` currently does not get unpoisoned (when heap
6658 // poisoning is enabled), which is alright as the `ref` argument is
6659 // not used by the artReadBarrierSlow entry point.
6660 //
6661 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006662 SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006663 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
6664 AddSlowPath(slow_path);
6665
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006666 __ B(slow_path->GetEntryLabel());
6667 __ Bind(slow_path->GetExitLabel());
6668}
6669
Roland Levillain44015862016-01-22 11:47:17 +00006670void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6671 Location out,
6672 Location ref,
6673 Location obj,
6674 uint32_t offset,
6675 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006676 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00006677 // Baker's read barriers shall be handled by the fast path
6678 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
6679 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006680 // If heap poisoning is enabled, unpoisoning will be taken care of
6681 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00006682 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006683 } else if (kPoisonHeapReferences) {
6684 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
6685 }
6686}
6687
Roland Levillain44015862016-01-22 11:47:17 +00006688void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6689 Location out,
6690 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006691 DCHECK(kEmitCompilerReadBarrier);
6692
Roland Levillain44015862016-01-22 11:47:17 +00006693 // Insert a slow path based read barrier *after* the GC root load.
6694 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006695 // Note that GC roots are not affected by heap poisoning, so we do
6696 // not need to do anything special for this here.
6697 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006698 new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006699 AddSlowPath(slow_path);
6700
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006701 __ B(slow_path->GetEntryLabel());
6702 __ Bind(slow_path->GetExitLabel());
6703}
6704
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006705void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
6706 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006707 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006708 locations->SetInAt(0, Location::RequiresRegister());
6709 locations->SetOut(Location::RequiresRegister());
6710}
6711
6712void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
6713 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00006714 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006715 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006716 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006717 __ Ldr(XRegisterFrom(locations->Out()),
6718 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006719 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006720 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00006721 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006722 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
6723 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006724 __ Ldr(XRegisterFrom(locations->Out()),
6725 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006726 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006727}
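// Sketch of the two dispatch shapes generated above (offsets and registers are illustrative):
//
//   kVTable:  ldr xOut, [xClass, #embedded_vtable_entry_offset]
//   kIMTable: ldr xOut, [xClass, #imt_ptr_offset]   // load the ImTable*
//             ldr xOut, [xOut, #imt_entry_offset]   // then the method entry within it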
6728
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006729static void PatchJitRootUse(uint8_t* code,
6730 const uint8_t* roots_data,
6731 vixl::aarch64::Literal<uint32_t>* literal,
6732 uint64_t index_in_table) {
6733 uint32_t literal_offset = literal->GetOffset();
6734 uintptr_t address =
6735 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
6736 uint8_t* data = code + literal_offset;
6737 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
6738}
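// A minimal sketch of what one patch does, assuming sizeof(GcRoot<mirror::Object>) is 4
// (a compressed reference): for index_in_table = 3, the 32-bit literal at code + literal_offset
// is rewritten to point at roots_data + 3 * 4, i.e. the root's slot in the JIT root table.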
6739
void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}

#undef __
#undef QUICK_ENTRY_POINT

#define __ assembler.GetVIXLAssembler()->

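// Shared fast path of the Baker read barrier thunks: test the read barrier state
// (gray) bit of `lock_word` and branch to `slow_path` if it is set. Otherwise,
// move `lr` back to the original LDR and return there so the compiled code
// performs the load itself. The Add of `ip0 >> 32` (always zero, as the lock word
// was loaded as a 32-bit value) only creates an artificial address dependency
// that orders the lock word load before the reference load without a barrier.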
static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
                                     vixl::aarch64::Register base_reg,
                                     vixl::aarch64::MemOperand& lock_word,
                                     vixl::aarch64::Label* slow_path,
                                     vixl::aarch64::Label* throw_npe = nullptr) {
  // Load the lock word containing the rb_state.
  __ Ldr(ip0.W(), lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
  static_assert(
      BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
      "Field and array LDR offsets must be the same to reuse the same code.");
  // To throw NPE, we return to the fast path; the artificial dependence below does not matter.
  if (throw_npe != nullptr) {
    __ Bind(throw_npe);
  }
  // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
  static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                "Field LDR must be 1 instruction (4B) before the return address label; "
                " 2 instructions (8B) for heap poisoning.");
  __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
  __ Br(lr);  // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}

// Load the read barrier introspection entrypoint in register `entrypoint`.
static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
                                                       vixl::aarch64::Register entrypoint) {
  // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip0.GetCode(), 16u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
}

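// Assemble one out-of-line Baker read barrier thunk. `encoded_data` packs the thunk
// kind together with one or two register numbers (BakerReadBarrierFirstRegField and
// BakerReadBarrierSecondRegField): field/acquire thunks receive the base and holder
// registers, while array and GC root thunks receive a single register. Each kind
// emits a fast path that returns directly to the compiled code and a slow path that
// hands the reference off to the read barrier mark introspection entrypoint.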
void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
                                                      uint32_t encoded_data,
                                                      /*out*/ std::string* debug_name) {
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField:
    case BakerReadBarrierKind::kAcquire: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      auto holder_reg =
          Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      // If base_reg differs from holder_reg, the offset was too large and we must have emitted
      // an explicit null check before the load. Otherwise, for implicit null checks, we need to
      // null-check the holder as we do not necessarily do that check before going to the thunk.
      vixl::aarch64::Label throw_npe_label;
      vixl::aarch64::Label* throw_npe = nullptr;
      if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
        throw_npe = &throw_npe_label;
        __ Cbz(holder_reg.W(), throw_npe);
      }
      // Check if the holder is gray and, if not, add fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint that performs further checks on the
      // reference and marks it if needed.
      vixl::aarch64::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
      __ Bind(&slow_path);
      if (kind == BakerReadBarrierKind::kField) {
        MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
        __ Ldr(ip0.W(), ldr_address);                        // Load the LDR (immediate) unsigned offset.
        LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
        __ Ubfx(ip0.W(), ip0.W(), 10, 12);                   // Extract the offset.
        __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2));  // Load the reference.
      } else {
        DCHECK(kind == BakerReadBarrierKind::kAcquire);
        DCHECK(!base_reg.Is(holder_reg));
        LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
        __ Ldar(ip0.W(), MemOperand(base_reg));
      }
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Br(ip1);  // Jump to the entrypoint.
      break;
    }
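    // The array thunk's slow path decodes the index register from the LDR (register)
    // instruction found at the return address and uses it to select the matching
    // per-register switch case in the introspection entrypoint.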
    case BakerReadBarrierKind::kArray: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffset(), 0);
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
      __ Bind(&slow_path);
      MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
      __ Ldr(ip0.W(), ldr_address);  // Load the LDR (register) unsigned offset.
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      __ Ubfx(ip0, ip0, 16, 6);      // Extract the index register, plus 32 (bit 21 is set).
      __ Bfi(ip1, ip0, 3, 6);        // Insert ip0 to the entrypoint address to create
                                     // a switch case target based on the index register.
      __ Mov(ip0, base_reg);         // Move the base register to ip0.
      __ Br(ip1);                    // Jump to the entrypoint's array switch case.
      break;
    }
    case BakerReadBarrierKind::kGcRoot: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      auto root_reg =
          Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label return_label, not_marked, forwarding_address;
      __ Cbz(root_reg, &return_label);
      MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip0.W(), lock_word);
      __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
      __ Bind(&return_label);
      __ Br(lr);
      __ Bind(&not_marked);
      __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
      __ B(&forwarding_address, mi);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
      // art_quick_read_barrier_mark_introspection_gc_roots.
      __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
      __ Mov(ip0.W(), root_reg);
      __ Br(ip1);
      __ Bind(&forwarding_address);
      __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
      __ Br(lr);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }

  // For JIT, the slow path is considered part of the compiled method,
  // so JIT should pass null as `debug_name`. Tests may not have a runtime.
  DCHECK(Runtime::Current() == nullptr ||
         !Runtime::Current()->UseJitCompilation() ||
         debug_name == nullptr);
  if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
    std::ostringstream oss;
    oss << "BakerReadBarrierThunk";
    switch (kind) {
      case BakerReadBarrierKind::kField:
        oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kAcquire:
        oss << "Acquire_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kArray:
        oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
      case BakerReadBarrierKind::kGcRoot:
        oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
    }
    *debug_name = oss.str();
  }
}

#undef __

}  // namespace arm64
}  // namespace art