/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// generates less code/data with a small num_entries.
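// At the threshold of 7 entries the two variants roughly break even: the compare/jump sequence
// costs about 1.5 * 7 + 3 ~= 13.5 instructions, while the jump table costs 7 instructions plus
// 7 32-bit literals, i.e. about 14 words of code/data.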
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// Reference load (except object array loads) uses LDR Wt, [Xn, #offset], which can handle
// offset < 16KiB. For offsets >= 16KiB, the load is emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
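// For example, a 32-bit reference load can encode an immediate offset of at most 0x3FFC
// (4095 * 4), so a field at offset 0x4004 would be accessed with something like
// ADD temp, obj, #0x4000 followed by LDR wN, [temp, #4] instead of a single LDR.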
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;

// Flags controlling the use of link-time generated thunks for Baker read barriers.
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForFields = true;
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForGcRoots = true;

// Some instructions have special requirements for a temporary, for example
// LoadClass/kBssEntry and LoadString/kBssEntry for Baker read barrier require
// temp that's not an R0 (to avoid an extra move) and Baker read barrier field
// loads with large offsets need a fixed register to limit the number of link-time
// thunks we generate. For these and similar cases, we want to reserve a specific
// register that's neither callee-save nor an argument register. We choose x15.
inline Location FixedTempLocation() {
  return Location::RegisterLocation(x15.GetCode());
}

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};
class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that the jump table we generate has the right size.
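  // Each entry placed below is the signed 32-bit byte offset of its target block relative to
  // `table_start_`, so code dispatching through the table can reconstruct a target address by
  // adding the loaded entry back to the table's own address.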
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Abstract base class for read barrier slow paths marking a reference
// `ref`.
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class ReadBarrierMarkSlowPathBaseARM64 : public SlowPathCodeARM64 {
 protected:
  ReadBarrierMarkSlowPathBaseARM64(HInstruction* instruction, Location ref, Location entrypoint)
      : SlowPathCodeARM64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathBaseARM64"; }

  // Generate assembly code calling the read barrier marking runtime
  // entry point (ReadBarrierMarkRegX).
  void GenerateReadBarrierMarkRuntimeCall(CodeGenerator* codegen) {
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
  }

  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathBaseARM64);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking.
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class ReadBarrierMarkSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction,
                               Location ref,
                               Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);
    __ B(GetExitLabel());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). The field `obj.field` in the object `obj` holding
// this reference does not get updated by this slow path after marking
// (see LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
// below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class LoadReferenceWithBakerReadBarrierSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierSlowPathARM64(HInstruction* instruction,
                                                 Location ref,
                                                 Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 size_t scale_factor,
                                                 bool needs_null_check,
                                                 bool use_load_acquire,
                                                 Register temp,
                                                 Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // When using MaybeGenerateReadBarrierSlow, the read barrier call is
    // inserted after the original load. However, in fast path based
    // Baker's read barriers, we need to perform the load of
    // mirror::Object::monitor_ *before* the original reference load.
    // This load-load ordering is required by the read barrier.
    // The slow path (for Baker's algorithm) should look like:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //   }
    //
    // Note: the original implementation in ReadBarrier::Barrier is
    // slightly more complex as it performs additional checks that we do
    // not do here for performance reasons.

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
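    // (The 32-bit load of the lock word above zero-extends into the X register, so
    // `temp_.X() LSR #32` is always zero; the ADD below therefore leaves `obj` numerically
    // unchanged while still creating the data dependency.)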
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    // if (rb_state == ReadBarrier::GrayState())
    //   ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);

    __ B(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  Register obj_;
  // The offset, index and scale factor to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  size_t scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // Should this reference load use Load-Acquire semantics?
  bool use_load_acquire_;
  // A temporary register used to hold the lock word of `obj_`.
  Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierSlowPathARM64);
};
875
Roland Levillain54f869e2017-03-06 13:54:11 +0000876// Slow path loading `obj`'s lock word, loading a reference from
877// object `*(obj + offset + (index << scale_factor))` into `ref`, and
878// marking `ref` if `obj` is gray according to the lock word (Baker
879// read barrier). If needed, this slow path also atomically updates
880// the field `obj.field` in the object `obj` holding this reference
881// after marking (contrary to
882// LoadReferenceWithBakerReadBarrierSlowPathARM64 above, which never
883// tries to update `obj.field`).
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100884//
885// This means that after the execution of this slow path, both `ref`
886// and `obj.field` will be up-to-date; i.e., after the flip, both will
887// hold the same to-space reference (unless another thread installed
888// another object reference (different from `ref`) in `obj.field`).
Roland Levillainba650a42017-03-06 13:52:32 +0000889//
Roland Levillain54f869e2017-03-06 13:54:11 +0000890// Argument `entrypoint` must be a register location holding the read
Roland Levillain97c46462017-05-11 14:04:03 +0100891// barrier marking runtime entry point to be invoked or an empty
892// location; in the latter case, the read barrier marking runtime
893// entry point will be loaded by the slow path code itself.
Roland Levillain54f869e2017-03-06 13:54:11 +0000894class LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
895 : public ReadBarrierMarkSlowPathBaseARM64 {
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100896 public:
Roland Levillain97c46462017-05-11 14:04:03 +0100897 LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
898 HInstruction* instruction,
899 Location ref,
900 Register obj,
901 uint32_t offset,
902 Location index,
903 size_t scale_factor,
904 bool needs_null_check,
905 bool use_load_acquire,
906 Register temp,
907 Location entrypoint = Location::NoLocation())
Roland Levillain54f869e2017-03-06 13:54:11 +0000908 : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100909 obj_(obj),
Roland Levillain54f869e2017-03-06 13:54:11 +0000910 offset_(offset),
911 index_(index),
912 scale_factor_(scale_factor),
913 needs_null_check_(needs_null_check),
914 use_load_acquire_(use_load_acquire),
Roland Levillain35345a52017-02-27 14:32:08 +0000915 temp_(temp) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100916 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain54f869e2017-03-06 13:54:11 +0000917 DCHECK(kUseBakerReadBarrier);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100918 }
919
920 const char* GetDescription() const OVERRIDE {
Roland Levillain54f869e2017-03-06 13:54:11 +0000921 return "LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64";
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100922 }
923
924 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
925 LocationSummary* locations = instruction_->GetLocations();
926 Register ref_reg = WRegisterFrom(ref_);
927 DCHECK(locations->CanCall());
928 DCHECK(ref_.IsRegister()) << ref_;
929 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
Roland Levillain54f869e2017-03-06 13:54:11 +0000930 DCHECK(obj_.IsW());
931 DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());
932
933 // This slow path is only used by the UnsafeCASObject intrinsic at the moment.
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100934 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
935 << "Unexpected instruction in read barrier marking and field updating slow path: "
936 << instruction_->DebugName();
937 DCHECK(instruction_->GetLocations()->Intrinsified());
938 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
Roland Levillain54f869e2017-03-06 13:54:11 +0000939 DCHECK_EQ(offset_, 0u);
940 DCHECK_EQ(scale_factor_, 0u);
941 DCHECK_EQ(use_load_acquire_, false);
942 // The location of the offset of the marked reference field within `obj_`.
943 Location field_offset = index_;
944 DCHECK(field_offset.IsRegister()) << field_offset;
945
946 // Temporary register `temp_`, used to store the lock word, must
947 // not be IP0 nor IP1, as we may use them to emit the reference
948 // load (in the call to GenerateRawReferenceLoad below), and we
949 // need the lock word to still be in `temp_` after the reference
950 // load.
951 DCHECK_NE(LocationFrom(temp_).reg(), IP0);
952 DCHECK_NE(LocationFrom(temp_).reg(), IP1);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100953
954 __ Bind(GetEntryLabel());
955
Roland Levillainff487002017-03-07 16:50:01 +0000956 // The implementation is similar to LoadReferenceWithBakerReadBarrierSlowPathARM64's:
957 //
958 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
959 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
960 // HeapReference<mirror::Object> ref = *src; // Original reference load.
961 // bool is_gray = (rb_state == ReadBarrier::GrayState());
962 // if (is_gray) {
963 // old_ref = ref;
964 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
965 // compareAndSwapObject(obj, field_offset, old_ref, ref);
966 // }
967
Roland Levillain54f869e2017-03-06 13:54:11 +0000968 // /* int32_t */ monitor = obj->monitor_
969 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
970 __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
971 if (needs_null_check_) {
972 codegen->MaybeRecordImplicitNullCheck(instruction_);
973 }
974 // /* LockWord */ lock_word = LockWord(monitor)
975 static_assert(sizeof(LockWord) == sizeof(int32_t),
976 "art::LockWord and int32_t have different sizes.");
977
978 // Introduce a dependency on the lock_word including rb_state,
979 // to prevent load-load reordering, and without using
980 // a memory barrier (which would be more expensive).
981 // `obj` is unchanged by this operation, but its value now depends
982 // on `temp`.
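    // (Since the 32-bit load of the lock word above zero-extends into the
    // full X register, `temp_.X() LSR #32` is always zero: the Add below
    // creates the data dependency without actually changing `obj_`.)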
983 __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));
984
985 // The actual reference load.
986 // A possible implicit null check has already been handled above.
987 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
988 arm64_codegen->GenerateRawReferenceLoad(instruction_,
989 ref_,
990 obj_,
991 offset_,
992 index_,
993 scale_factor_,
994 /* needs_null_check */ false,
995 use_load_acquire_);
996
997 // Mark the object `ref` when `obj` is gray.
998 //
999 // if (rb_state == ReadBarrier::GrayState())
1000 // ref = ReadBarrier::Mark(ref);
1001 //
1002 // Given the numeric representation, it's enough to check the low bit of the rb_state.
1003 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
1004 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
1005 __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());
1006
1007 // Save the old value of the reference before marking it.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001008 // Note that we cannot use IP0 to save the old reference, as IP0 is
1009 // used internally by the ReadBarrierMarkRegX entry point, and we
1010 // need the old reference after the call to that entry point.
1011 DCHECK_NE(LocationFrom(temp_).reg(), IP0);
1012 __ Mov(temp_.W(), ref_reg);
1013
Roland Levillain54f869e2017-03-06 13:54:11 +00001014 GenerateReadBarrierMarkRuntimeCall(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001015
1016 // If the new reference is different from the old reference,
Roland Levillain54f869e2017-03-06 13:54:11 +00001017 // update the field in the holder (`*(obj_ + field_offset)`).
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001018 //
1019 // Note that this field could also hold a different object, if
1020 // another thread had concurrently changed it. In that case, the
1021 // LDXR/CMP/BNE sequence of instructions in the compare-and-set
1022 // (CAS) operation below would abort the CAS, leaving the field
1023 // as-is.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001024 __ Cmp(temp_.W(), ref_reg);
Roland Levillain54f869e2017-03-06 13:54:11 +00001025 __ B(eq, GetExitLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001026
1027 // Update the holder's field atomically. This may fail if the
1028 // mutator updates it before us, but it's OK. This is achieved
1029 // using a strong compare-and-set (CAS) operation with relaxed
1030 // memory synchronization ordering, where the expected value is
1031 // the old reference and the desired value is the new reference.
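    // Conceptually, the sequence below behaves like a strong CAS with
    // relaxed ordering, roughly (an analogue only, not the generated code):
    //
    //   field->compare_exchange_strong(old_ref, new_ref, std::memory_order_relaxed);
    //
    // where the Ldxr/Stxr retry loop absorbs spurious store-exclusive
    // failures while still detecting a concurrent change of the field.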
1032
1033 MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
1034 UseScratchRegisterScope temps(masm);
1035
1036 // Convenience aliases.
1037 Register base = obj_.W();
Roland Levillain54f869e2017-03-06 13:54:11 +00001038 Register offset = XRegisterFrom(field_offset);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001039 Register expected = temp_.W();
1040 Register value = ref_reg;
1041 Register tmp_ptr = temps.AcquireX(); // Pointer to actual memory.
1042 Register tmp_value = temps.AcquireW(); // Value in memory.
1043
1044 __ Add(tmp_ptr, base.X(), Operand(offset));
1045
1046 if (kPoisonHeapReferences) {
1047 arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
1048 if (value.Is(expected)) {
1049 // Do not poison `value`, as it is the same register as
1050 // `expected`, which has just been poisoned.
1051 } else {
1052 arm64_codegen->GetAssembler()->PoisonHeapReference(value);
1053 }
1054 }
1055
1056 // do {
1057 // tmp_value = [tmp_ptr] - expected;
1058 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1059
Roland Levillain24a4d112016-10-26 13:10:46 +01001060 vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001061 __ Bind(&loop_head);
1062 __ Ldxr(tmp_value, MemOperand(tmp_ptr));
1063 __ Cmp(tmp_value, expected);
Roland Levillain24a4d112016-10-26 13:10:46 +01001064 __ B(&comparison_failed, ne);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001065 __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
1066 __ Cbnz(tmp_value, &loop_head);
Roland Levillain24a4d112016-10-26 13:10:46 +01001067 __ B(&exit_loop);
1068 __ Bind(&comparison_failed);
1069 __ Clrex();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001070 __ Bind(&exit_loop);
1071
1072 if (kPoisonHeapReferences) {
1073 arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
1074 if (value.Is(expected)) {
1075 // Do not unpoison `value`, as it is the same register as
1076 // `expected`, which has just been unpoisoned.
1077 } else {
1078 arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
1079 }
1080 }
1081
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001082 __ B(GetExitLabel());
1083 }
1084
1085 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001086 // The register containing the object holding the marked object reference field.
1087 const Register obj_;
Roland Levillain54f869e2017-03-06 13:54:11 +00001088 // The offset, index and scale factor to access the reference in `obj_`.
1089 uint32_t offset_;
1090 Location index_;
1091 size_t scale_factor_;
1092 // Is a null check required?
1093 bool needs_null_check_;
1094 // Should this reference load use Load-Acquire semantics?
1095 bool use_load_acquire_;
1096 // A temporary register used to hold the lock word of `obj_`; and
1097 // also to hold the original reference value, when the reference is
1098 // marked.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001099 const Register temp_;
1100
Roland Levillain54f869e2017-03-06 13:54:11 +00001101 DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001102};
1103
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001104// Slow path generating a read barrier for a heap reference.
1105class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
1106 public:
1107 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
1108 Location out,
1109 Location ref,
1110 Location obj,
1111 uint32_t offset,
1112 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +00001113 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001114 out_(out),
1115 ref_(ref),
1116 obj_(obj),
1117 offset_(offset),
1118 index_(index) {
1119 DCHECK(kEmitCompilerReadBarrier);
1120 // If `obj` is equal to `out` or `ref`, it means the initial object
1121 // has been overwritten by (or after) the heap object reference load
1122 // to be instrumented, e.g.:
1123 //
1124 // __ Ldr(out, HeapOperand(out, class_offset));
Roland Levillain44015862016-01-22 11:47:17 +00001125 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001126 //
1127 // In that case, we have lost the information about the original
1128 // object, and the emitted read barrier cannot work properly.
1129 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
1130 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
1131 }
1132
1133 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1134 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1135 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001136 DataType::Type type = DataType::Type::kReference;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001137 DCHECK(locations->CanCall());
1138 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain3d312422016-06-23 13:53:42 +01001139 DCHECK(instruction_->IsInstanceFieldGet() ||
1140 instruction_->IsStaticFieldGet() ||
1141 instruction_->IsArrayGet() ||
1142 instruction_->IsInstanceOf() ||
1143 instruction_->IsCheckCast() ||
Andreas Gamped9911ee2017-03-27 13:27:24 -07001144 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain44015862016-01-22 11:47:17 +00001145 << "Unexpected instruction in read barrier for heap reference slow path: "
1146 << instruction_->DebugName();
Roland Levillain19c54192016-11-04 13:44:09 +00001147 // The read barrier instrumentation of object ArrayGet
1148 // instructions does not support the HIntermediateAddress
1149 // instruction.
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001150 DCHECK(!(instruction_->IsArrayGet() &&
Artem Serov328429f2016-07-06 16:23:04 +01001151 instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001152
1153 __ Bind(GetEntryLabel());
1154
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001155 SaveLiveRegisters(codegen, locations);
1156
1157 // We may have to change the index's value, but as `index_` is a
1158 // constant member (like other "inputs" of this slow path),
1159 // we introduce a copy of it, `index`.
1160 Location index = index_;
1161 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +01001162 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001163 if (instruction_->IsArrayGet()) {
1164 // Compute the actual memory offset and store it in `index`.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001165 Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001166 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
1167 if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
1168 // We are about to change the value of `index_reg` (see the
1169 // calls to vixl::MacroAssembler::Lsl and
1170 // vixl::MacroAssembler::Mov below), but it has
1171 // not been saved by the previous call to
1172 // art::SlowPathCode::SaveLiveRegisters, as it is a
1173 // callee-save register --
1174 // art::SlowPathCode::SaveLiveRegisters does not consider
1175 // callee-save registers, as it has been designed with the
1176 // assumption that callee-save registers are supposed to be
1177 // handled by the called function. So, as a callee-save
1178 // register, `index_reg` _would_ eventually be saved onto
1179 // the stack, but it would be too late: we would have
1180 // changed its value earlier. Therefore, we manually save
1181 // it here into another freely available register,
1182 // `free_reg`, chosen of course among the caller-save
1183 // registers (as a callee-save `free_reg` register would
1184 // exhibit the same problem).
1185 //
1186 // Note we could have requested a temporary register from
1187 // the register allocator instead; but we prefer not to, as
1188 // this is a slow path, and we know we can find a
1189 // caller-save register that is available.
1190 Register free_reg = FindAvailableCallerSaveRegister(codegen);
1191 __ Mov(free_reg.W(), index_reg);
1192 index_reg = free_reg;
1193 index = LocationFrom(index_reg);
1194 } else {
1195 // The initial register stored in `index_` has already been
1196 // saved in the call to art::SlowPathCode::SaveLiveRegisters
1197 // (as it is not a callee-save register), so we can freely
1198 // use it.
1199 }
1200 // Shifting the index value contained in `index_reg` by the scale
1201 // factor (2) cannot overflow in practice, as the runtime is
1202 // unable to allocate object arrays with a size larger than
1203 // 2^26 - 1 (that is, 2^28 - 4 bytes).
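    // Concretely: (2^26 - 1) << 2 = 2^28 - 4, and adding the data offset
    // held in `offset_` below keeps the result far below 2^32, so the
    // 32-bit register cannot overflow.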
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001204 __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001205 static_assert(
1206 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
1207 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
1208 __ Add(index_reg, index_reg, Operand(offset_));
1209 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001210 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
1211 // intrinsics, `index_` is not shifted by a scale factor of 2
1212 // (as in the case of ArrayGet), as it is actually an offset
1213 // to an object field within an object.
1214 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001215 DCHECK(instruction_->GetLocations()->Intrinsified());
1216 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
1217 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
1218 << instruction_->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001219 DCHECK_EQ(offset_, 0u);
Roland Levillaina7426c62016-08-03 15:02:10 +01001220 DCHECK(index_.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001221 }
1222 }
1223
1224 // We're moving two or three locations to locations that could
1225 // overlap, so we need a parallel move resolver.
1226 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +01001227 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001228 parallel_move.AddMove(ref_,
1229 LocationFrom(calling_convention.GetRegisterAt(0)),
1230 type,
1231 nullptr);
1232 parallel_move.AddMove(obj_,
1233 LocationFrom(calling_convention.GetRegisterAt(1)),
1234 type,
1235 nullptr);
1236 if (index.IsValid()) {
1237 parallel_move.AddMove(index,
1238 LocationFrom(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001239 DataType::Type::kInt32,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001240 nullptr);
1241 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1242 } else {
1243 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1244 arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
1245 }
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001246 arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001247 instruction_,
1248 instruction_->GetDexPc(),
1249 this);
1250 CheckEntrypointTypes<
1251 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
1252 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1253
1254 RestoreLiveRegisters(codegen, locations);
1255
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001256 __ B(GetExitLabel());
1257 }
1258
1259 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }
1260
1261 private:
1262 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001263 size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
1264 size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001265 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
1266 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
1267 return Register(VIXLRegCodeFromART(i), kXRegSize);
1268 }
1269 }
1270 // We shall never fail to find a free caller-save register, as
1271 // there are more than two core caller-save registers on ARM64
1272 // (meaning it is possible to find one which is different from
1273 // `ref` and `obj`).
1274 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
1275 LOG(FATAL) << "Could not find a free register";
1276 UNREACHABLE();
1277 }
1278
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001279 const Location out_;
1280 const Location ref_;
1281 const Location obj_;
1282 const uint32_t offset_;
1283 // An additional location containing an index to an array.
1284 // Only used for HArrayGet and the UnsafeGetObject &
1285 // UnsafeGetObjectVolatile intrinsics.
1286 const Location index_;
1287
1288 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
1289};
1290
1291// Slow path generating a read barrier for a GC root.
1292class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
1293 public:
1294 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +00001295 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +00001296 DCHECK(kEmitCompilerReadBarrier);
1297 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001298
1299 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1300 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001301 DataType::Type type = DataType::Type::kReference;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001302 DCHECK(locations->CanCall());
1303 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +00001304 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1305 << "Unexpected instruction in read barrier for GC root slow path: "
1306 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001307
1308 __ Bind(GetEntryLabel());
1309 SaveLiveRegisters(codegen, locations);
1310
1311 InvokeRuntimeCallingConvention calling_convention;
1312 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1313 // The argument of the ReadBarrierForRootSlow is not a managed
1314 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
1315 // thus we need a 64-bit move here, and we cannot use
1316 //
1317 // arm64_codegen->MoveLocation(
1318 // LocationFrom(calling_convention.GetRegisterAt(0)),
1319 // root_,
1320 // type);
1321 //
1322 // which would emit a 32-bit move, as `type` is a (32-bit wide)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001323 // reference type (`DataType::Type::kReference`).
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001324 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001325 arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001326 instruction_,
1327 instruction_->GetDexPc(),
1328 this);
1329 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1330 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1331
1332 RestoreLiveRegisters(codegen, locations);
1333 __ B(GetExitLabel());
1334 }
1335
1336 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
1337
1338 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001339 const Location out_;
1340 const Location root_;
1341
1342 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
1343};
1344
Alexandre Rames5319def2014-10-23 10:03:10 +01001345#undef __
1346
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001347Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001348 Location next_location;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001349 if (type == DataType::Type::kVoid) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001350 LOG(FATAL) << "Unreachable type " << type;
1351 }
1352
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001353 if (DataType::IsFloatingPointType(type) &&
Alexandre Rames5319def2014-10-23 10:03:10 +01001354 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001355 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001356 } else if (!DataType::IsFloatingPointType(type) &&
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001357 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
1358 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1359 } else {
1360 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001361 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1362 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001363 }
1364
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001365 // Space on the stack is reserved for all arguments.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001366 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +01001367 return next_location;
1368}
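// For illustration (assuming the standard ARM64 managed calling convention:
// x0 holds the ArtMethod*, x1-x7 carry core arguments and d0-d7 carry FP
// arguments), successive calls for an (int, float, long, double) signature
// would hand out roughly x1 (as w1), d0 (as s0), x2 and d1, falling back to
// stack slots once either register pool is exhausted.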
1369
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001370Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001371 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001372}
1373
Serban Constantinescu579885a2015-02-22 20:51:33 +00001374CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001375 const CompilerOptions& compiler_options,
1376 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001377 : CodeGenerator(graph,
1378 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001379 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001380 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001381 callee_saved_core_registers.GetList(),
1382 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001383 compiler_options,
1384 stats),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001385 block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1386 jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001387 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001388 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001389 move_resolver_(graph->GetAllocator(), this),
1390 assembler_(graph->GetAllocator()),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001391 uint32_literals_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001392 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001393 uint64_literals_(std::less<uint64_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001394 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001395 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001396 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001397 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001398 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001399 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001400 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko6fd16062018-06-26 11:02:04 +01001401 boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001402 baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001403 jit_string_patches_(StringReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001404 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001405 jit_class_patches_(TypeReferenceValueComparator(),
Vladimir Marko450f1d02018-04-25 17:27:45 +01001406 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1407 jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
1408 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001409 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001410 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001411}
Alexandre Rames5319def2014-10-23 10:03:10 +01001412
Alexandre Rames67555f72014-11-18 10:55:16 +00001413#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001414
Zheng Xu3927c8b2015-11-18 17:46:25 +08001415void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001416 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001417 jump_table->EmitTable(this);
1418 }
1419}
1420
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001421void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001422 EmitJumpTables();
Vladimir Marko450f1d02018-04-25 17:27:45 +01001423
1424 // Emit JIT baker read barrier slow paths.
1425 DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
1426 for (auto& entry : jit_baker_read_barrier_slow_paths_) {
1427 uint32_t encoded_data = entry.first;
1428 vixl::aarch64::Label* slow_path_entry = &entry.second.label;
1429 __ Bind(slow_path_entry);
1430 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name */ nullptr);
1431 }
1432
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001433 // Ensure we emit the literal pool.
1434 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001435
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001436 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +00001437
1438 // Verify Baker read barrier linker patches.
1439 if (kIsDebugBuild) {
1440 ArrayRef<const uint8_t> code = allocator->GetMemory();
1441 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
1442 DCHECK(info.label.IsBound());
1443 uint32_t literal_offset = info.label.GetLocation();
1444 DCHECK_ALIGNED(literal_offset, 4u);
1445
1446 auto GetInsn = [&code](uint32_t offset) {
1447 DCHECK_ALIGNED(offset, 4u);
1448 return
1449 (static_cast<uint32_t>(code[offset + 0]) << 0) +
1450 (static_cast<uint32_t>(code[offset + 1]) << 8) +
1451 (static_cast<uint32_t>(code[offset + 2]) << 16) +
1452 (static_cast<uint32_t>(code[offset + 3]) << 24);
1453 };
1454
1455 const uint32_t encoded_data = info.custom_data;
1456 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
1457 // Check that the next instruction (or, for GC roots, the previous one) matches the expected LDR.
1458 switch (kind) {
1459 case BakerReadBarrierKind::kField: {
1460 DCHECK_GE(code.size() - literal_offset, 8u);
1461 uint32_t next_insn = GetInsn(literal_offset + 4u);
1462 // LDR (immediate) with correct base_reg.
1463 CheckValidReg(next_insn & 0x1fu); // Check destination register.
1464 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1465 CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
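          // (0xb9400000 encodes LDR (immediate, 32-bit); the mask keeps the
          // opcode/size bits [31:22] and the base register field Rn [9:5],
          // ignoring the unsigned immediate and the destination register,
          // which is range-checked above.)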
1466 break;
1467 }
1468 case BakerReadBarrierKind::kArray: {
1469 DCHECK_GE(code.size() - literal_offset, 8u);
1470 uint32_t next_insn = GetInsn(literal_offset + 4u);
1471 // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
1472 // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
1473 CheckValidReg(next_insn & 0x1fu); // Check destination register.
1474 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1475 CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
1476 CheckValidReg((next_insn >> 16) & 0x1fu); // Check index register.
1477 break;
1478 }
1479 case BakerReadBarrierKind::kGcRoot: {
1480 DCHECK_GE(literal_offset, 4u);
1481 uint32_t prev_insn = GetInsn(literal_offset - 4u);
1482 // LDR (immediate) with correct root_reg.
1483 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1484 CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg);
1485 break;
1486 }
1487 default:
1488 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
1489 UNREACHABLE();
1490 }
1491 }
1492 }
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001493}
1494
Zheng Xuad4450e2015-04-17 18:48:56 +08001495void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1496 // Note: There are 6 kinds of moves:
1497 // 1. constant -> GPR/FPR (non-cycle)
1498 // 2. constant -> stack (non-cycle)
1499 // 3. GPR/FPR -> GPR/FPR
1500 // 4. GPR/FPR -> stack
1501 // 5. stack -> GPR/FPR
1502 // 6. stack -> stack (non-cycle)
1503 // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4 and 5
1504 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
1505 // cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to resolve the
1506 // dependency.
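  // For example, swapping two core registers (x1 -> x2, x2 -> x1) forms a
  // case-3 cycle and is broken by first copying one value into the spare
  // GPR temp; an FP cycle is handled the same way with the FP temp.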
1507 vixl_temps_.Open(GetVIXLAssembler());
1508}
1509
1510void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1511 vixl_temps_.Close();
1512}
1513
1514Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
Artem Serovd4bccf12017-04-03 18:47:32 +01001515 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
1516 || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
1517 || kind == Location::kSIMDStackSlot);
1518 kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
1519 ? Location::kFpuRegister
1520 : Location::kRegister;
Zheng Xuad4450e2015-04-17 18:48:56 +08001521 Location scratch = GetScratchLocation(kind);
1522 if (!scratch.Equals(Location::NoLocation())) {
1523 return scratch;
1524 }
1525 // Allocate from VIXL temp registers.
1526 if (kind == Location::kRegister) {
1527 scratch = LocationFrom(vixl_temps_.AcquireX());
1528 } else {
Roland Levillain952b2352017-05-03 19:49:14 +01001529 DCHECK_EQ(kind, Location::kFpuRegister);
Artem Serovd4bccf12017-04-03 18:47:32 +01001530 scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
1531 ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
1532 : vixl_temps_.AcquireD());
Zheng Xuad4450e2015-04-17 18:48:56 +08001533 }
1534 AddScratchLocation(scratch);
1535 return scratch;
1536}
1537
1538void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1539 if (loc.IsRegister()) {
1540 vixl_temps_.Release(XRegisterFrom(loc));
1541 } else {
1542 DCHECK(loc.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001543 vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
Zheng Xuad4450e2015-04-17 18:48:56 +08001544 }
1545 RemoveScratchLocation(loc);
1546}
1547
Alexandre Rames3e69f162014-12-10 10:36:50 +00001548void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001549 MoveOperands* move = moves_[index];
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001550 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001551}
1552
Alexandre Rames5319def2014-10-23 10:03:10 +01001553void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001554 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001555 __ Bind(&frame_entry_label_);
1556
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001557 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
1558 UseScratchRegisterScope temps(masm);
1559 Register temp = temps.AcquireX();
1560 __ Ldrh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1561 __ Add(temp, temp, 1);
1562 __ Strh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1563 }
1564
Vladimir Marko33bff252017-11-01 14:35:42 +00001565 bool do_overflow_check =
1566 FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001567 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001568 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001569 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001570 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Vladimir Marko33bff252017-11-01 14:35:42 +00001571 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
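    // The load below probes the address `sp - reserved bytes`: if there is
    // not enough stack left, that address is expected to fall into the
    // protected guard region, and the runtime's fault handler converts the
    // resulting fault into a StackOverflowError (hence the RecordPcInfo).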
Artem Serov914d7a82017-02-07 14:33:49 +00001572 {
1573 // Ensure that between load and RecordPcInfo there are no pools emitted.
1574 ExactAssemblyScope eas(GetVIXLAssembler(),
1575 kInstructionSize,
1576 CodeBufferCheckScope::kExactSize);
1577 __ ldr(wzr, MemOperand(temp, 0));
1578 RecordPcInfo(nullptr, 0);
1579 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001580 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001581
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001582 if (!HasEmptyFrame()) {
1583 int frame_size = GetFrameSize();
1584 // Stack layout:
1585 // sp[frame_size - 8] : lr.
1586 // ... : other preserved core registers.
1587 // ... : other preserved fp registers.
1588 // ... : reserved frame space.
1589 // sp[0] : current method.
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001590
1591 // Save the current method if we need it. Note that we do not
1592 // do this in HCurrentMethod, as the instruction might have been removed
1593 // in the SSA graph.
1594 if (RequiresCurrentMethod()) {
1595 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001596 } else {
1597 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001598 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001599 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001600 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1601 frame_size - GetCoreSpillSize());
1602 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1603 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001604
1605 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1606 // Initialize should_deoptimize flag to 0.
1607 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1608 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1609 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001610 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01001611
1612 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01001613}
1614
1615void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001616 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001617 if (!HasEmptyFrame()) {
1618 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001619 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1620 frame_size - FrameEntrySpillSize());
1621 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1622 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001623 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001624 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001625 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001626 __ Ret();
1627 GetAssembler()->cfi().RestoreState();
1628 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001629}
1630
Scott Wakeling97c72b72016-06-24 16:19:36 +01001631CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001632 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001633 return CPURegList(CPURegister::kRegister, kXRegSize,
1634 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001635}
1636
Scott Wakeling97c72b72016-06-24 16:19:36 +01001637CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001638 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1639 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001640 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1641 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001642}
1643
Alexandre Rames5319def2014-10-23 10:03:10 +01001644void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1645 __ Bind(GetLabelOf(block));
1646}
1647
Calin Juravle175dc732015-08-25 15:42:32 +01001648void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1649 DCHECK(location.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001650 __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
Calin Juravle175dc732015-08-25 15:42:32 +01001651}
1652
Calin Juravlee460d1d2015-09-29 04:52:17 +01001653void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1654 if (location.IsRegister()) {
1655 locations->AddTemp(location);
1656 } else {
1657 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1658 }
1659}
1660
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001661void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001662 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001663 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001664 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001665 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001666 if (value_can_be_null) {
1667 __ Cbz(value, &done);
1668 }
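  // The card to mark is at index `object >> kCardShift` from the biased
  // card table base held in `card`; the runtime biases that base so its low
  // byte equals the dirty-card value, so storing the low byte of `card`
  // itself marks the card dirty.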
Andreas Gampe542451c2016-07-26 09:02:02 -07001669 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001670 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001671 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001672 if (value_can_be_null) {
1673 __ Bind(&done);
1674 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001675}
1676
David Brazdil58282f42016-01-14 12:45:10 +00001677void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001678 // Blocked core registers:
1679 // lr : Runtime reserved.
1680 // tr : Runtime reserved.
Roland Levillain97c46462017-05-11 14:04:03 +01001681 // mr : Runtime reserved.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001682 // ip1 : VIXL core temp.
1683 // ip0 : VIXL core temp.
1684 //
1685 // Blocked fp registers:
1686 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001687 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1688 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001689 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001690 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001691 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001692
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001693 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001694 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001695 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001696 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001697
David Brazdil58282f42016-01-14 12:45:10 +00001698 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001699 // Stubs do not save callee-save floating point registers. If the graph
1700 // is debuggable, we need to deal with these registers differently. For
1701 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001702 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1703 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001704 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001705 }
1706 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001707}
1708
Alexandre Rames3e69f162014-12-10 10:36:50 +00001709size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1710 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1711 __ Str(reg, MemOperand(sp, stack_index));
1712 return kArm64WordSize;
1713}
1714
1715size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1716 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1717 __ Ldr(reg, MemOperand(sp, stack_index));
1718 return kArm64WordSize;
1719}
1720
1721size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1722 FPRegister reg = FPRegister(reg_id, kDRegSize);
1723 __ Str(reg, MemOperand(sp, stack_index));
1724 return kArm64WordSize;
1725}
1726
1727size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1728 FPRegister reg = FPRegister(reg_id, kDRegSize);
1729 __ Ldr(reg, MemOperand(sp, stack_index));
1730 return kArm64WordSize;
1731}
1732
Alexandre Rames5319def2014-10-23 10:03:10 +01001733void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001734 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001735}
1736
1737void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001738 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001739}
1740
Vladimir Markoa0431112018-06-25 09:32:54 +01001741const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
1742 return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
1743}
1744
Alexandre Rames67555f72014-11-18 10:55:16 +00001745void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001746 if (constant->IsIntConstant()) {
1747 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1748 } else if (constant->IsLongConstant()) {
1749 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1750 } else if (constant->IsNullConstant()) {
1751 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001752 } else if (constant->IsFloatConstant()) {
1753 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1754 } else {
1755 DCHECK(constant->IsDoubleConstant());
1756 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1757 }
1758}
1759
Alexandre Rames3e69f162014-12-10 10:36:50 +00001760
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001761static bool CoherentConstantAndType(Location constant, DataType::Type type) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001762 DCHECK(constant.IsConstant());
1763 HConstant* cst = constant.GetConstant();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001764 return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001765 // Null is mapped to a core W register, which we associate with kInt32.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001766 (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
1767 (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
1768 (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
1769 (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001770}
1771
Roland Levillain952b2352017-05-03 19:49:14 +01001772// Allocate a scratch register from the VIXL pool, querying first
1773// the floating-point register pool, and then the core register
1774// pool. This is essentially a reimplementation of
Roland Levillain558dea12017-01-27 19:40:44 +00001775// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1776// using a different allocation strategy.
1777static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1778 vixl::aarch64::UseScratchRegisterScope* temps,
1779 int size_in_bits) {
1780 return masm->GetScratchFPRegisterList()->IsEmpty()
1781 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1782 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1783}
1784
Calin Juravlee460d1d2015-09-29 04:52:17 +01001785void CodeGeneratorARM64::MoveLocation(Location destination,
1786 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001787 DataType::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001788 if (source.Equals(destination)) {
1789 return;
1790 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001791
1792 // A valid move can always be inferred from the destination and source
1793 // locations. When moving from and to a register, the argument type can be
1794 // used to generate 32bit instead of 64bit moves. In debug mode we also
1795 // check the coherency of the locations and the type.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001796 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001797
1798 if (destination.IsRegister() || destination.IsFpuRegister()) {
1799 if (unspecified_type) {
1800 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1801 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001802 (src_cst != nullptr && (src_cst->IsIntConstant()
1803 || src_cst->IsFloatConstant()
1804 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001805 // For stack slots and 32bit constants, a 32bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001806 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexandre Rames67555f72014-11-18 10:55:16 +00001807 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001808 // If the source is a double stack slot or a 64bit constant, a 64bit
1809 // type is appropriate. Else the source is a register, and since the
1810 // type has not been specified, we chose a 64bit type to force a 64bit
1811 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001812 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexandre Rames67555f72014-11-18 10:55:16 +00001813 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001814 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001815 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1816 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001817 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001818 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1819 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1820 __ Ldr(dst, StackOperandFrom(source));
Artem Serovd4bccf12017-04-03 18:47:32 +01001821 } else if (source.IsSIMDStackSlot()) {
1822 __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001823 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001824 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001825 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001826 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001827 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001828 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001829 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001830 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001831 DataType::Type source_type = DataType::Is64BitType(dst_type)
1832 ? DataType::Type::kInt64
1833 : DataType::Type::kInt32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001834 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1835 }
1836 } else {
1837 DCHECK(source.IsFpuRegister());
1838 if (destination.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001839 DataType::Type source_type = DataType::Is64BitType(dst_type)
1840 ? DataType::Type::kFloat64
1841 : DataType::Type::kFloat32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001842 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1843 } else {
1844 DCHECK(destination.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001845 if (GetGraph()->HasSIMD()) {
1846 __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
1847 } else {
1848 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
1849 }
1850 }
1851 }
1852 } else if (destination.IsSIMDStackSlot()) {
1853 if (source.IsFpuRegister()) {
1854 __ Str(QRegisterFrom(source), StackOperandFrom(destination));
1855 } else {
1856 DCHECK(source.IsSIMDStackSlot());
1857 UseScratchRegisterScope temps(GetVIXLAssembler());
1858 if (GetVIXLAssembler()->GetScratchFPRegisterList()->IsEmpty()) {
1859 Register temp = temps.AcquireX();
1860 __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
1861 __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
1862 __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
1863 __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
1864 } else {
1865 FPRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
1866 __ Ldr(temp, StackOperandFrom(source));
1867 __ Str(temp, StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001868 }
1869 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001870 } else { // The destination is not a register. It must be a stack slot.
1871 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1872 if (source.IsRegister() || source.IsFpuRegister()) {
1873 if (unspecified_type) {
1874 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001875 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001876 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001877 dst_type =
1878 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001879 }
1880 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001881 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1882 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001883 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001884 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001885 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1886 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001887 UseScratchRegisterScope temps(GetVIXLAssembler());
1888 HConstant* src_cst = source.GetConstant();
1889 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001890 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001891 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1892 ? Register(xzr)
1893 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001894 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001895 if (src_cst->IsIntConstant()) {
1896 temp = temps.AcquireW();
1897 } else if (src_cst->IsLongConstant()) {
1898 temp = temps.AcquireX();
1899 } else if (src_cst->IsFloatConstant()) {
1900 temp = temps.AcquireS();
1901 } else {
1902 DCHECK(src_cst->IsDoubleConstant());
1903 temp = temps.AcquireD();
1904 }
1905 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001906 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001907 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001908 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001909 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001910 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001911 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001912 // Use any scratch register (a core or a floating-point one)
1913 // from VIXL scratch register pools as a temporary.
1914 //
1915 // We used to only use the FP scratch register pool, but in some
1916 // rare cases the only register from this pool (D31) would
1917 // already be used (e.g. within a ParallelMove instruction, when
1918 // a move is blocked by another move requiring a scratch FP
1919 // register, which would reserve D31). To prevent this issue, we
1920 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001921 //
1922 // Also, we ask for an FP scratch register first, as the
Roland Levillain952b2352017-05-03 19:49:14 +01001923 // demand for scratch core registers is higher. This is why we
Roland Levillain558dea12017-01-27 19:40:44 +00001924 // use AcquireFPOrCoreCPURegisterOfSize instead of
1925 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1926 // allocates core scratch registers first.
1927 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1928 GetVIXLAssembler(),
1929 &temps,
1930 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001931 __ Ldr(temp, StackOperandFrom(source));
1932 __ Str(temp, StackOperandFrom(destination));
1933 }
1934 }
1935}
1936
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001937void CodeGeneratorARM64::Load(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001938 CPURegister dst,
1939 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001940 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001941 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001942 case DataType::Type::kUint8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001943 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001944 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001945 case DataType::Type::kInt8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001946 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001947 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001948 case DataType::Type::kUint16:
Alexandre Rames67555f72014-11-18 10:55:16 +00001949 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001950 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001951 case DataType::Type::kInt16:
1952 __ Ldrsh(Register(dst), src);
1953 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001954 case DataType::Type::kInt32:
1955 case DataType::Type::kReference:
1956 case DataType::Type::kInt64:
1957 case DataType::Type::kFloat32:
1958 case DataType::Type::kFloat64:
1959 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001960 __ Ldr(dst, src);
1961 break;
Aart Bik66c158e2018-01-31 12:55:04 -08001962 case DataType::Type::kUint32:
1963 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001964 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001965 LOG(FATAL) << "Unreachable type " << type;
1966 }
1967}
1968
Calin Juravle77520bc2015-01-12 18:45:46 +00001969void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001970 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001971 const MemOperand& src,
1972 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001973 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001974 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001975 Register temp_base = temps.AcquireX();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001976 DataType::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001977
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001978 DCHECK(!src.IsPreIndex());
1979 DCHECK(!src.IsPostIndex());
1980
1981 // TODO(vixl): Let the MacroAssembler handle MemOperand.
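  // The load-acquire instructions used below (ldarb/ldarh/ldar) only accept a base register with
  // no offset, so the effective address is materialized into a temporary first.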
Scott Wakeling97c72b72016-06-24 16:19:36 +01001982 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001983 {
1984 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1985 MemOperand base = MemOperand(temp_base);
1986 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001987 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001988 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001989 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00001990 {
1991 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1992 __ ldarb(Register(dst), base);
1993 if (needs_null_check) {
1994 MaybeRecordImplicitNullCheck(instruction);
1995 }
1996 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001997 if (type == DataType::Type::kInt8) {
1998 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
Artem Serov914d7a82017-02-07 14:33:49 +00001999 }
2000 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002001 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002002 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00002003 {
2004 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2005 __ ldarh(Register(dst), base);
2006 if (needs_null_check) {
2007 MaybeRecordImplicitNullCheck(instruction);
2008 }
2009 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002010 if (type == DataType::Type::kInt16) {
2011 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
2012 }
Artem Serov914d7a82017-02-07 14:33:49 +00002013 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002014 case DataType::Type::kInt32:
2015 case DataType::Type::kReference:
2016 case DataType::Type::kInt64:
2017 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00002018 {
2019 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2020 __ ldar(Register(dst), base);
2021 if (needs_null_check) {
2022 MaybeRecordImplicitNullCheck(instruction);
2023 }
2024 }
2025 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002026 case DataType::Type::kFloat32:
2027 case DataType::Type::kFloat64: {
Artem Serov914d7a82017-02-07 14:33:49 +00002028 DCHECK(dst.IsFPRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002029 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002030
Artem Serov914d7a82017-02-07 14:33:49 +00002031 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
2032 {
2033 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2034 __ ldar(temp, base);
2035 if (needs_null_check) {
2036 MaybeRecordImplicitNullCheck(instruction);
2037 }
2038 }
2039 __ Fmov(FPRegister(dst), temp);
2040 break;
Roland Levillain44015862016-01-22 11:47:17 +00002041 }
Aart Bik66c158e2018-01-31 12:55:04 -08002042 case DataType::Type::kUint32:
2043 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002044 case DataType::Type::kVoid:
Artem Serov914d7a82017-02-07 14:33:49 +00002045 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002046 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002047 }
2048}
2049
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002050void CodeGeneratorARM64::Store(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002051 CPURegister src,
2052 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002053 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002054 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002055 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002056 case DataType::Type::kInt8:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002057 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002058 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002059 case DataType::Type::kUint16:
2060 case DataType::Type::kInt16:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002061 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002062 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002063 case DataType::Type::kInt32:
2064 case DataType::Type::kReference:
2065 case DataType::Type::kInt64:
2066 case DataType::Type::kFloat32:
2067 case DataType::Type::kFloat64:
2068 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002069 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00002070 break;
Aart Bik66c158e2018-01-31 12:55:04 -08002071 case DataType::Type::kUint32:
2072 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002073 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002074 LOG(FATAL) << "Unreachable type " << type;
2075 }
2076}
2077
Artem Serov914d7a82017-02-07 14:33:49 +00002078void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002079 DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002080 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00002081 const MemOperand& dst,
2082 bool needs_null_check) {
2083 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002084 UseScratchRegisterScope temps(GetVIXLAssembler());
2085 Register temp_base = temps.AcquireX();
2086
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002087 DCHECK(!dst.IsPreIndex());
2088 DCHECK(!dst.IsPostIndex());
2089
2090 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08002091 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01002092 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002093 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00002094 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002095 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002096 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002097 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002098 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00002099 {
2100 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2101 __ stlrb(Register(src), base);
2102 if (needs_null_check) {
2103 MaybeRecordImplicitNullCheck(instruction);
2104 }
2105 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002106 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002107 case DataType::Type::kUint16:
2108 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00002109 {
2110 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2111 __ stlrh(Register(src), base);
2112 if (needs_null_check) {
2113 MaybeRecordImplicitNullCheck(instruction);
2114 }
2115 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002116 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002117 case DataType::Type::kInt32:
2118 case DataType::Type::kReference:
2119 case DataType::Type::kInt64:
2120 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00002121 {
2122 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2123 __ stlr(Register(src), base);
2124 if (needs_null_check) {
2125 MaybeRecordImplicitNullCheck(instruction);
2126 }
2127 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002128 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002129 case DataType::Type::kFloat32:
2130 case DataType::Type::kFloat64: {
2131 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002132 Register temp_src;
2133 if (src.IsZero()) {
2134 // The zero register is used to avoid synthesizing zero constants.
2135 temp_src = Register(src);
2136 } else {
2137 DCHECK(src.IsFPRegister());
2138 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
2139 __ Fmov(temp_src, FPRegister(src));
2140 }
Artem Serov914d7a82017-02-07 14:33:49 +00002141 {
2142 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2143 __ stlr(temp_src, base);
2144 if (needs_null_check) {
2145 MaybeRecordImplicitNullCheck(instruction);
2146 }
2147 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002148 break;
2149 }
Aart Bik66c158e2018-01-31 12:55:04 -08002150 case DataType::Type::kUint32:
2151 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002152 case DataType::Type::kVoid:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002153 LOG(FATAL) << "Unreachable type " << type;
2154 }
2155}
2156
Calin Juravle175dc732015-08-25 15:42:32 +01002157void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
2158 HInstruction* instruction,
2159 uint32_t dex_pc,
2160 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01002161 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00002162
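  // Load the entrypoint address from the Thread's quick entrypoint table (tr is the thread
  // register), then branch-and-link to it.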
2163 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
2164 {
2165 // Ensure the pc position is recorded immediately after the `blr` instruction.
2166 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
2167 __ blr(lr);
2168 if (EntrypointRequiresStackMap(entrypoint)) {
2169 RecordPcInfo(instruction, dex_pc, slow_path);
2170 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00002171 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002172}
2173
Roland Levillaindec8f632016-07-22 17:10:06 +01002174void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
2175 HInstruction* instruction,
2176 SlowPathCode* slow_path) {
2177 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01002178 __ Ldr(lr, MemOperand(tr, entry_point_offset));
2179 __ Blr(lr);
2180}
2181
Alexandre Rames67555f72014-11-18 10:55:16 +00002182void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01002183 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002184 UseScratchRegisterScope temps(GetVIXLAssembler());
2185 Register temp = temps.AcquireW();
Vladimir Markodc682aa2018-01-04 18:42:57 +00002186 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
2187 const size_t status_byte_offset =
2188 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
2189 constexpr uint32_t shifted_initialized_value =
2190 enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002191
Serban Constantinescu02164b32014-11-13 14:05:07 +00002192 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002193 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Vladimir Markodc682aa2018-01-04 18:42:57 +00002194 __ Add(temp, class_reg, status_byte_offset);
Igor Murashkin86083f72017-10-27 10:59:04 -07002195 __ Ldarb(temp, HeapOperand(temp));
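  // An acquire-loaded status below the shifted kInitialized value means the class is not yet
  // initialized; take the slow path in that case.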
Vladimir Markodc682aa2018-01-04 18:42:57 +00002196 __ Cmp(temp, shifted_initialized_value);
Vladimir Marko2c64a832018-01-04 11:31:56 +00002197 __ B(lo, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00002198 __ Bind(slow_path->GetExitLabel());
2199}
Alexandre Rames5319def2014-10-23 10:03:10 +01002200
Vladimir Marko175e7862018-03-27 09:03:13 +00002201void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
2202 HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
2203 uint32_t path_to_root = check->GetBitstringPathToRoot();
2204 uint32_t mask = check->GetBitstringMask();
2205 DCHECK(IsPowerOfTwo(mask + 1));
2206 size_t mask_bits = WhichPowerOf2(mask + 1);
2207
2208 if (mask_bits == 16u) {
2209 // Load only the bitstring part of the status word.
2210 __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
2211 } else {
2212 // /* uint32_t */ temp = temp->status_
2213 __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
2214 // Extract the bitstring bits.
2215 __ Ubfx(temp, temp, 0, mask_bits);
2216 }
2217 // Compare the bitstring bits to `path_to_root`.
2218 __ Cmp(temp, path_to_root);
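  // The caller acts on the flags set by this comparison, typically branching to a slow path on ne
  // when the bitstring does not match the expected path to root.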
2219}
2220
Roland Levillain44015862016-01-22 11:47:17 +00002221void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002222 BarrierType type = BarrierAll;
2223
2224 switch (kind) {
2225 case MemBarrierKind::kAnyAny:
2226 case MemBarrierKind::kAnyStore: {
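      // kAnyStore must order earlier loads as well as earlier stores against later stores, so it
      // is implemented with a full barrier, the same as kAnyAny.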
2227 type = BarrierAll;
2228 break;
2229 }
2230 case MemBarrierKind::kLoadAny: {
2231 type = BarrierReads;
2232 break;
2233 }
2234 case MemBarrierKind::kStoreStore: {
2235 type = BarrierWrites;
2236 break;
2237 }
2238 default:
2239 LOG(FATAL) << "Unexpected memory barrier " << kind;
2240 }
2241 __ Dmb(InnerShareable, type);
2242}
2243
Serban Constantinescu02164b32014-11-13 14:05:07 +00002244void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
2245 HBasicBlock* successor) {
2246 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002247 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
2248 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01002249 slow_path =
2250 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002251 instruction->SetSlowPath(slow_path);
2252 codegen_->AddSlowPath(slow_path);
2253 if (successor != nullptr) {
2254 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002255 }
2256 } else {
2257 DCHECK_EQ(slow_path->GetSuccessor(), successor);
2258 }
2259
Serban Constantinescu02164b32014-11-13 14:05:07 +00002260 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
2261 Register temp = temps.AcquireW();
2262
Andreas Gampe542451c2016-07-26 09:02:02 -07002263 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
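  // A non-zero flags value indicates a pending request (e.g. suspend or checkpoint) and sends us
  // to the slow path.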
Serban Constantinescu02164b32014-11-13 14:05:07 +00002264 if (successor == nullptr) {
2265 __ Cbnz(temp, slow_path->GetEntryLabel());
2266 __ Bind(slow_path->GetReturnLabel());
2267 } else {
2268 __ Cbz(temp, codegen_->GetLabelOf(successor));
2269 __ B(slow_path->GetEntryLabel());
2270 // slow_path will return to GetLabelOf(successor).
2271 }
2272}
2273
Alexandre Rames5319def2014-10-23 10:03:10 +01002274InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
2275 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08002276 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01002277 assembler_(codegen->GetAssembler()),
2278 codegen_(codegen) {}
2279
Alexandre Rames67555f72014-11-18 10:55:16 +00002280void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002281 DCHECK_EQ(instr->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002282 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002283 DataType::Type type = instr->GetResultType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002284 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002285 case DataType::Type::kInt32:
2286 case DataType::Type::kInt64:
Alexandre Rames5319def2014-10-23 10:03:10 +01002287 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002288 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002289 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002290 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002291
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002292 case DataType::Type::kFloat32:
2293 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002294 locations->SetInAt(0, Location::RequiresFpuRegister());
2295 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002296 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002297 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002298
Alexandre Rames5319def2014-10-23 10:03:10 +01002299 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002300 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002301 }
2302}
2303
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002304void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
2305 const FieldInfo& field_info) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002306 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2307
2308 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002309 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Rames09a99962015-04-15 11:47:56 +01002310 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002311 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2312 object_field_get_with_read_barrier
2313 ? LocationSummary::kCallOnSlowPath
2314 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002315 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002316 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillaind0b51832017-01-26 19:04:23 +00002317 // We need a temporary register for the read barrier marking slow
2318 // path in CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002319 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
2320 !Runtime::Current()->UseJitCompilation() &&
2321 !field_info.IsVolatile()) {
2322 // If link-time thunks for the Baker read barrier are enabled, for AOT
2323 // non-volatile loads we need a temporary only if the offset is too big.
2324 if (field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
2325 locations->AddTemp(FixedTempLocation());
2326 }
2327 } else {
2328 locations->AddTemp(Location::RequiresRegister());
2329 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002330 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002331 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002332 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002333 locations->SetOut(Location::RequiresFpuRegister());
2334 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002335 // The output overlaps for an object field get when read barriers
2336 // are enabled: we do not want the load to overwrite the object's
2337 // location, as we need it to emit the read barrier.
2338 locations->SetOut(
2339 Location::RequiresRegister(),
2340 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01002341 }
2342}
2343
2344void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
2345 const FieldInfo& field_info) {
2346 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00002347 LocationSummary* locations = instruction->GetLocations();
2348 Location base_loc = locations->InAt(0);
2349 Location out = locations->Out();
2350 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Vladimir Marko61b92282017-10-11 13:23:17 +01002351 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
2352 DataType::Type load_type = instruction->GetType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002353 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01002354
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002355 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
Vladimir Marko61b92282017-10-11 13:23:17 +01002356 load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002357 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002358 // /* HeapReference<Object> */ out = *(base + offset)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002359 Register base = RegisterFrom(base_loc, DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002360 Location maybe_temp =
2361 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
Roland Levillain44015862016-01-22 11:47:17 +00002362 // Note that potential implicit null checks are handled in this
2363 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
2364 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2365 instruction,
2366 out,
2367 base,
2368 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002369 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00002370 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002371 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00002372 } else {
2373 // General case.
2374 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002375 // Note that a potential implicit null check is handled in this
2376 // CodeGeneratorARM64::LoadAcquire call.
2377 // NB: LoadAcquire will record the pc info if needed.
2378 codegen_->LoadAcquire(
2379 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01002380 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002381 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2382 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Vladimir Marko61b92282017-10-11 13:23:17 +01002383 codegen_->Load(load_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01002384 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01002385 }
Vladimir Marko61b92282017-10-11 13:23:17 +01002386 if (load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002387 // If read barriers are enabled, emit read barriers other than
2388 // Baker's using a slow path (and also unpoison the loaded
2389 // reference, if heap poisoning is enabled).
2390 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
2391 }
Roland Levillain4d027112015-07-01 15:41:14 +01002392 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002393}
2394
2395void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
2396 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002397 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01002398 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002399 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
2400 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002401 } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002402 locations->SetInAt(1, Location::RequiresFpuRegister());
2403 } else {
2404 locations->SetInAt(1, Location::RequiresRegister());
2405 }
2406}
2407
2408void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002409 const FieldInfo& field_info,
2410 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002411 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2412
2413 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002414 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01002415 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01002416 Offset offset = field_info.GetFieldOffset();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002417 DataType::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002418
Roland Levillain4d027112015-07-01 15:41:14 +01002419 {
2420 // We use a block to end the scratch scope before the write barrier, thus
2421 // freeing the temporary registers so they can be used in `MarkGCCard`.
2422 UseScratchRegisterScope temps(GetVIXLAssembler());
2423
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002424 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01002425 DCHECK(value.IsW());
2426 Register temp = temps.AcquireW();
2427 __ Mov(temp, value.W());
2428 GetAssembler()->PoisonHeapReference(temp.W());
2429 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01002430 }
Roland Levillain4d027112015-07-01 15:41:14 +01002431
2432 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00002433 codegen_->StoreRelease(
2434 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01002435 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002436 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2437 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002438 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2439 codegen_->MaybeRecordImplicitNullCheck(instruction);
2440 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002441 }
2442
2443 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002444 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002445 }
2446}
2447
Alexandre Rames67555f72014-11-18 10:55:16 +00002448void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002449 DataType::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002450
2451 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002452 case DataType::Type::kInt32:
2453 case DataType::Type::kInt64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002454 Register dst = OutputRegister(instr);
2455 Register lhs = InputRegisterAt(instr, 0);
2456 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002457 if (instr->IsAdd()) {
2458 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002459 } else if (instr->IsAnd()) {
2460 __ And(dst, lhs, rhs);
2461 } else if (instr->IsOr()) {
2462 __ Orr(dst, lhs, rhs);
2463 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002464 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002465 } else if (instr->IsRor()) {
2466 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002467 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002468 __ Ror(dst, lhs, shift);
2469 } else {
2470 // Ensure the shift distance is in a register of the same size as the result. If
2471 // we are rotating a long and the shift comes in a W register originally,
2472 // we don't need to sxtw it for use as an X, since the shift distances are
2473 // all masked with (reg_bits - 1) anyway.
2474 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
2475 }
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002476 } else if (instr->IsMin() || instr->IsMax()) {
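        // Compare once and conditionally select: lhs wins on lt for Min (gt for Max), otherwise rhs.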
2477 __ Cmp(lhs, rhs);
2478 __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
Alexandre Rames67555f72014-11-18 10:55:16 +00002479 } else {
2480 DCHECK(instr->IsXor());
2481 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002482 }
2483 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002484 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002485 case DataType::Type::kFloat32:
2486 case DataType::Type::kFloat64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002487 FPRegister dst = OutputFPRegister(instr);
2488 FPRegister lhs = InputFPRegisterAt(instr, 0);
2489 FPRegister rhs = InputFPRegisterAt(instr, 1);
2490 if (instr->IsAdd()) {
2491 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002492 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002493 __ Fsub(dst, lhs, rhs);
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002494 } else if (instr->IsMin()) {
2495 __ Fmin(dst, lhs, rhs);
2496 } else if (instr->IsMax()) {
2497 __ Fmax(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002498 } else {
2499 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002500 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002501 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002502 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002503 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002504 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002505 }
2506}
2507
Serban Constantinescu02164b32014-11-13 14:05:07 +00002508void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2509 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2510
Vladimir Markoca6fff82017-10-03 14:49:14 +01002511 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002512 DataType::Type type = instr->GetResultType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002513 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002514 case DataType::Type::kInt32:
2515 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002516 locations->SetInAt(0, Location::RequiresRegister());
2517 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Artem Serov87c97052016-09-23 13:34:31 +01002518 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002519 break;
2520 }
2521 default:
2522 LOG(FATAL) << "Unexpected shift type " << type;
2523 }
2524}
2525
2526void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2527 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2528
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002529 DataType::Type type = instr->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002530 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002531 case DataType::Type::kInt32:
2532 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002533 Register dst = OutputRegister(instr);
2534 Register lhs = InputRegisterAt(instr, 0);
2535 Operand rhs = InputOperandAt(instr, 1);
2536 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002537 uint32_t shift_value = rhs.GetImmediate() &
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002538 (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002539 if (instr->IsShl()) {
2540 __ Lsl(dst, lhs, shift_value);
2541 } else if (instr->IsShr()) {
2542 __ Asr(dst, lhs, shift_value);
2543 } else {
2544 __ Lsr(dst, lhs, shift_value);
2545 }
2546 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002547 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002548
2549 if (instr->IsShl()) {
2550 __ Lsl(dst, lhs, rhs_reg);
2551 } else if (instr->IsShr()) {
2552 __ Asr(dst, lhs, rhs_reg);
2553 } else {
2554 __ Lsr(dst, lhs, rhs_reg);
2555 }
2556 }
2557 break;
2558 }
2559 default:
2560 LOG(FATAL) << "Unexpected shift operation type " << type;
2561 }
2562}
2563
Alexandre Rames5319def2014-10-23 10:03:10 +01002564void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002565 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002566}
2567
2568void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002569 HandleBinaryOp(instruction);
2570}
2571
2572void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2573 HandleBinaryOp(instruction);
2574}
2575
2576void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2577 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002578}
2579
Artem Serov7fc63502016-02-09 17:15:29 +00002580void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002581 DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002582 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002583 locations->SetInAt(0, Location::RequiresRegister());
2584 // There is no immediate variant of negated bitwise instructions in AArch64.
2585 locations->SetInAt(1, Location::RequiresRegister());
2586 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2587}
2588
Artem Serov7fc63502016-02-09 17:15:29 +00002589void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002590 Register dst = OutputRegister(instr);
2591 Register lhs = InputRegisterAt(instr, 0);
2592 Register rhs = InputRegisterAt(instr, 1);
2593
2594 switch (instr->GetOpKind()) {
2595 case HInstruction::kAnd:
2596 __ Bic(dst, lhs, rhs);
2597 break;
2598 case HInstruction::kOr:
2599 __ Orn(dst, lhs, rhs);
2600 break;
2601 case HInstruction::kXor:
2602 __ Eon(dst, lhs, rhs);
2603 break;
2604 default:
2605 LOG(FATAL) << "Unreachable";
2606 }
2607}
2608
Anton Kirilov74234da2017-01-13 14:42:47 +00002609void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2610 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002611 DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
2612 instruction->GetType() == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002613 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002614 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames8626b742015-11-25 16:28:08 +00002615 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2616 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2617 } else {
2618 locations->SetInAt(0, Location::RequiresRegister());
2619 }
2620 locations->SetInAt(1, Location::RequiresRegister());
2621 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2622}
2623
Anton Kirilov74234da2017-01-13 14:42:47 +00002624void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2625 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002626 DataType::Type type = instruction->GetType();
Alexandre Rames8626b742015-11-25 16:28:08 +00002627 HInstruction::InstructionKind kind = instruction->GetInstrKind();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002628 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002629 Register out = OutputRegister(instruction);
2630 Register left;
2631 if (kind != HInstruction::kNeg) {
2632 left = InputRegisterAt(instruction, 0);
2633 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002634 // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
Alexandre Rames8626b742015-11-25 16:28:08 +00002635 // shifter operand operation, the IR generating `right_reg` (input to the type
2636 // conversion) can have a different type from the current instruction's type,
2637 // so we manually indicate the type.
2638 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002639 Operand right_operand(0);
2640
Anton Kirilov74234da2017-01-13 14:42:47 +00002641 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2642 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002643 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2644 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002645 right_operand = Operand(right_reg,
2646 helpers::ShiftFromOpKind(op_kind),
2647 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002648 }
2649
2650 // Logical binary operations do not support extension operations in the
2651 // operand. Note that VIXL would still handle one if it was passed, by generating
2652 // the extension as a separate instruction.
2653 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2654 DCHECK(!right_operand.IsExtendedRegister() ||
2655 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2656 kind != HInstruction::kNeg));
2657 switch (kind) {
2658 case HInstruction::kAdd:
2659 __ Add(out, left, right_operand);
2660 break;
2661 case HInstruction::kAnd:
2662 __ And(out, left, right_operand);
2663 break;
2664 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002665 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002666 __ Neg(out, right_operand);
2667 break;
2668 case HInstruction::kOr:
2669 __ Orr(out, left, right_operand);
2670 break;
2671 case HInstruction::kSub:
2672 __ Sub(out, left, right_operand);
2673 break;
2674 case HInstruction::kXor:
2675 __ Eor(out, left, right_operand);
2676 break;
2677 default:
2678 LOG(FATAL) << "Unexpected operation kind: " << kind;
2679 UNREACHABLE();
2680 }
2681}
2682
Artem Serov328429f2016-07-06 16:23:04 +01002683void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002684 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002685 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002686 locations->SetInAt(0, Location::RequiresRegister());
2687 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
Artem Serov87c97052016-09-23 13:34:31 +01002688 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002689}
2690
Roland Levillain19c54192016-11-04 13:44:09 +00002691void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002692 __ Add(OutputRegister(instruction),
2693 InputRegisterAt(instruction, 0),
2694 Operand(InputOperandAt(instruction, 1)));
2695}
2696
Artem Serove1811ed2017-04-27 16:50:47 +01002697void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
2698 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002699 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serove1811ed2017-04-27 16:50:47 +01002700
2701 HIntConstant* shift = instruction->GetShift()->AsIntConstant();
2702
2703 locations->SetInAt(0, Location::RequiresRegister());
2704 // For the byte case we don't need to shift the index variable, so we can encode the data offset
2705 // into the ADD instruction. For other cases we prefer the data_offset to be in a register; that
2706 // hoists the data offset constant generation out of the loop and reduces the critical path
2707 // length in the loop.
2708 locations->SetInAt(1, shift->GetValue() == 0
2709 ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
2710 : Location::RequiresRegister());
2711 locations->SetInAt(2, Location::ConstantLocation(shift));
2712 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2713}
2714
2715void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
2716 HIntermediateAddressIndex* instruction) {
2717 Register index_reg = InputRegisterAt(instruction, 0);
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002718 uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
Artem Serove1811ed2017-04-27 16:50:47 +01002719 uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();
2720
2721 if (shift == 0) {
2722 __ Add(OutputRegister(instruction), index_reg, offset);
2723 } else {
2724 Register offset_reg = InputRegisterAt(instruction, 1);
2725 __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
2726 }
2727}
2728
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002729void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002730 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002731 new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002732 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2733 if (instr->GetOpKind() == HInstruction::kSub &&
2734 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002735 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002736 // Don't allocate register for Mneg instruction.
2737 } else {
2738 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2739 Location::RequiresRegister());
2740 }
2741 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2742 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002743 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2744}
2745
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002746void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002747 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002748 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2749 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002750
2751 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2752 // This fixup should be carried out for all multiply-accumulate instructions:
2753 // madd, msub, smaddl, smsubl, umaddl and umsubl.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002754 if (instr->GetType() == DataType::Type::kInt64 &&
Alexandre Rames418318f2015-11-20 15:55:47 +00002755 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2756 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002757 vixl::aarch64::Instruction* prev =
2758 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002759 if (prev->IsLoadOrStore()) {
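      // The erratum may be triggered when a 64-bit multiply-accumulate directly follows a memory
      // access, so break up that sequence with a nop.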
2760 // Make sure we emit exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002761 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002762 __ nop();
2763 }
2764 }
2765
2766 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002767 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002768 __ Madd(res, mul_left, mul_right, accumulator);
2769 } else {
2770 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002771 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002772 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002773 __ Mneg(res, mul_left, mul_right);
2774 } else {
2775 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2776 __ Msub(res, mul_left, mul_right, accumulator);
2777 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002778 }
2779}
2780
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002781void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002782 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002783 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002784 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002785 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2786 object_array_get_with_read_barrier
2787 ? LocationSummary::kCallOnSlowPath
2788 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002789 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002790 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillain54f869e2017-03-06 13:54:11 +00002791 // We need a temporary register for the read barrier marking slow
2792 // path in CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002793 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
2794 !Runtime::Current()->UseJitCompilation() &&
2795 instruction->GetIndex()->IsConstant()) {
2796 // Array loads with constant index are treated as field loads.
2797 // If link-time thunks for the Baker read barrier are enabled, for AOT
2798 // constant index loads we need a temporary only if the offset is too big.
2799 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
2800 uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002801 offset += index << DataType::SizeShift(DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002802 if (offset >= kReferenceLoadMinFarOffset) {
2803 locations->AddTemp(FixedTempLocation());
2804 }
2805 } else {
2806 locations->AddTemp(Location::RequiresRegister());
2807 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002808 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002809 locations->SetInAt(0, Location::RequiresRegister());
2810 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002811 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002812 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2813 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002814 // The output overlaps in the case of an object array get with
2815 // read barriers enabled: we do not want the move to overwrite the
2816 // array's location, as we need it to emit the read barrier.
2817 locations->SetOut(
2818 Location::RequiresRegister(),
2819 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002820 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002821}
2822
2823void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002824 DataType::Type type = instruction->GetType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002825 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002826 LocationSummary* locations = instruction->GetLocations();
2827 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002828 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002829 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002830 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2831 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002832 MacroAssembler* masm = GetVIXLAssembler();
2833 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002834
Roland Levillain19c54192016-11-04 13:44:09 +00002835 // The read barrier instrumentation of object ArrayGet instructions
2836 // does not support the HIntermediateAddress instruction.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002837 DCHECK(!((type == DataType::Type::kReference) &&
Roland Levillain19c54192016-11-04 13:44:09 +00002838 instruction->GetArray()->IsIntermediateAddress() &&
2839 kEmitCompilerReadBarrier));
2840
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002841 if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00002842 // Object ArrayGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002843 // Note that a potential implicit null check is handled in the
2844 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
Vladimir Marko66d691d2017-04-07 17:53:39 +01002845 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002846 if (index.IsConstant()) {
2847 // Array load with a constant index can be treated as a field load.
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002848 offset += Int64FromLocation(index) << DataType::SizeShift(type);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002849 Location maybe_temp =
2850 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
2851 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2852 out,
2853 obj.W(),
2854 offset,
2855 maybe_temp,
Vladimir Marko66d691d2017-04-07 17:53:39 +01002856 /* needs_null_check */ false,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002857 /* use_load_acquire */ false);
2858 } else {
2859 Register temp = WRegisterFrom(locations->GetTemp(0));
2860 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko66d691d2017-04-07 17:53:39 +01002861 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ false);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002862 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002863 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002864 // General case.
2865 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002866 Register length;
2867 if (maybe_compressed_char_at) {
2868 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2869 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002870 {
2871 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2872 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2873
2874 if (instruction->GetArray()->IsIntermediateAddress()) {
2875 DCHECK_LT(count_offset, offset);
2876 int64_t adjusted_offset =
2877 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2878 // Note that `adjusted_offset` is negative, so this will be a LDUR.
2879 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2880 } else {
2881 __ Ldr(length, HeapOperand(obj, count_offset));
2882 }
2883 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002884 }
jessicahandojo05765752016-09-09 19:01:32 -07002885 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002886 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002887 if (maybe_compressed_char_at) {
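        // Bit 0 of the length encodes the compression state: compressed strings store 8-bit
        // characters (loaded with Ldrb), uncompressed strings store 16-bit ones (Ldrh).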
2888 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002889 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2890 "Expecting 0=compressed, 1=uncompressed");
2891 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002892 __ Ldrb(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002893 HeapOperand(obj, offset + Int64FromLocation(index)));
jessicahandojo05765752016-09-09 19:01:32 -07002894 __ B(&done);
2895 __ Bind(&uncompressed_load);
2896 __ Ldrh(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002897 HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
jessicahandojo05765752016-09-09 19:01:32 -07002898 __ Bind(&done);
2899 } else {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002900 offset += Int64FromLocation(index) << DataType::SizeShift(type);
jessicahandojo05765752016-09-09 19:01:32 -07002901 source = HeapOperand(obj, offset);
2902 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002903 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002904 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002905 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002906 // We do not need to compute the intermediate address from the array: the
2907 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002908 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002909 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002910 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002911 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2912 }
2913 temp = obj;
2914 } else {
2915 __ Add(temp, obj, offset);
2916 }
jessicahandojo05765752016-09-09 19:01:32 -07002917 if (maybe_compressed_char_at) {
2918 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002919 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2920 "Expecting 0=compressed, 1=uncompressed");
2921 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002922 __ Ldrb(Register(OutputCPURegister(instruction)),
2923 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2924 __ B(&done);
2925 __ Bind(&uncompressed_load);
2926 __ Ldrh(Register(OutputCPURegister(instruction)),
2927 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2928 __ Bind(&done);
2929 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002930 source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
jessicahandojo05765752016-09-09 19:01:32 -07002931 }
Roland Levillain44015862016-01-22 11:47:17 +00002932 }
jessicahandojo05765752016-09-09 19:01:32 -07002933 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002934 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2935 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002936 codegen_->Load(type, OutputCPURegister(instruction), source);
2937 codegen_->MaybeRecordImplicitNullCheck(instruction);
2938 }
Roland Levillain44015862016-01-22 11:47:17 +00002939
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002940 if (type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002941 static_assert(
2942 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2943 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2944 Location obj_loc = locations->InAt(0);
2945 if (index.IsConstant()) {
2946 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2947 } else {
2948 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2949 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002950 }
Roland Levillain4d027112015-07-01 15:41:14 +01002951 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002952}
2953
Alexandre Rames5319def2014-10-23 10:03:10 +01002954void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002955 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002956 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002957 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002958}
2959
2960void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002961 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002962 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002963 {
2964 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2965 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2966 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2967 codegen_->MaybeRecordImplicitNullCheck(instruction);
2968 }
jessicahandojo05765752016-09-09 19:01:32 -07002969 // Mask out the compression flag from the String's length.
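  // With string compression the count field holds (length << 1) | flag, where
  // bit 0 is 0 for a compressed (8-bit) string and 1 for an uncompressed
  // (16-bit) one, so a single logical shift right recovers the length.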
2970 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002971 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002972 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002973}
2974
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002975void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002976 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002977
2978 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002979 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002980 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002981 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002982 LocationSummary::kCallOnSlowPath :
2983 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002984 locations->SetInAt(0, Location::RequiresRegister());
2985 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002986 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2987 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002988 } else if (DataType::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002989 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002990 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002991 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002992 }
2993}
2994
2995void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002996 DataType::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002997 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002998 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002999 bool needs_write_barrier =
3000 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01003001
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003002 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01003003 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003004 CPURegister source = value;
3005 Location index = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003006 size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003007 MemOperand destination = HeapOperand(array);
3008 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003009
3010 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003011 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003012 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003013 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003014 destination = HeapOperand(array, offset);
3015 } else {
3016 UseScratchRegisterScope temps(masm);
3017 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01003018 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01003019 // We do not need to compute the intermediate address from the array: the
3020 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01003021 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01003022 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01003023 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01003024 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
3025 }
3026 temp = array;
3027 } else {
3028 __ Add(temp, array, offset);
3029 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003030 destination = HeapOperand(temp,
3031 XRegisterFrom(index),
3032 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003033 DataType::SizeShift(value_type));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003034 }
Artem Serov914d7a82017-02-07 14:33:49 +00003035 {
3036 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
3037 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3038 codegen_->Store(value_type, value, destination);
3039 codegen_->MaybeRecordImplicitNullCheck(instruction);
3040 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003041 } else {
Artem Serov328429f2016-07-06 16:23:04 +01003042 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003043 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003044 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01003045 {
3046 // We use a block to end the scratch scope before the write barrier, thus
3047 // freeing the temporary registers so they can be used in `MarkGCCard`.
3048 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003049 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01003050 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003051 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003052 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01003053 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01003054 destination = HeapOperand(temp,
3055 XRegisterFrom(index),
3056 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003057 DataType::SizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01003058 }
3059
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003060 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3061 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3062 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3063
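      // The fast-path type check below compares value->klass_ against
      // array->klass_->component_type_. If the array's static type is
      // Object[] and the component type's super_class_ is null, the component
      // type can only be java.lang.Object, so the store may proceed without
      // calling into the runtime.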
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003064 if (may_need_runtime_call_for_type_check) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01003065 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003066 codegen_->AddSlowPath(slow_path);
3067 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003068 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003069 __ Cbnz(Register(value), &non_zero);
3070 if (!index.IsConstant()) {
3071 __ Add(temp, array, offset);
3072 }
Artem Serov914d7a82017-02-07 14:33:49 +00003073 {
3074 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
3075 // emitted.
3076 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3077 __ Str(wzr, destination);
3078 codegen_->MaybeRecordImplicitNullCheck(instruction);
3079 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003080 __ B(&done);
3081 __ Bind(&non_zero);
3082 }
3083
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003084 // Note that when Baker read barriers are enabled, the type
3085 // checks are performed without read barriers. This is fine,
3086 // even in the case where a class object is in the from-space
3087 // after the flip, as a comparison involving such a type would
3088 // not produce a false positive; it may of course produce a
3089 // false negative, in which case we would take the ArraySet
3090 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01003091
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003092 Register temp2 = temps.AcquireSameSizeAs(array);
3093 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00003094 {
3095 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
3096 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3097 __ Ldr(temp, HeapOperand(array, class_offset));
3098 codegen_->MaybeRecordImplicitNullCheck(instruction);
3099 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003100 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01003101
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003102 // /* HeapReference<Class> */ temp = temp->component_type_
3103 __ Ldr(temp, HeapOperand(temp, component_offset));
3104 // /* HeapReference<Class> */ temp2 = value->klass_
3105 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
3106 // If heap poisoning is enabled, no need to unpoison `temp`
3107 // nor `temp2`, as we are comparing two poisoned references.
3108 __ Cmp(temp, temp2);
3109 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01003110
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003111 if (instruction->StaticTypeOfArrayIsObjectArray()) {
3112 vixl::aarch64::Label do_put;
3113 __ B(eq, &do_put);
3114 // If heap poisoning is enabled, the `temp` reference has
3115 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003116 GetAssembler()->MaybeUnpoisonHeapReference(temp);
3117
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003118 // /* HeapReference<Class> */ temp = temp->super_class_
3119 __ Ldr(temp, HeapOperand(temp, super_offset));
3120 // If heap poisoning is enabled, no need to unpoison
3121 // `temp`, as we are comparing against null below.
3122 __ Cbnz(temp, slow_path->GetEntryLabel());
3123 __ Bind(&do_put);
3124 } else {
3125 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003126 }
3127 }
3128
3129 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01003130 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003131 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01003132 __ Mov(temp2, value.W());
3133 GetAssembler()->PoisonHeapReference(temp2);
3134 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003135 }
3136
3137 if (!index.IsConstant()) {
3138 __ Add(temp, array, offset);
Vladimir Markod1ef8732017-04-18 13:55:13 +01003139 } else {
3140 // We no longer need `temp` here, so release it: the store below may need
3141 // a scratch register (if the constant index makes the offset too large)
3142 // and the poisoned `source` could be using the other scratch register.
3143 temps.Release(temp);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003144 }
Artem Serov914d7a82017-02-07 14:33:49 +00003145 {
3146 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
3147 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3148 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003149
Artem Serov914d7a82017-02-07 14:33:49 +00003150 if (!may_need_runtime_call_for_type_check) {
3151 codegen_->MaybeRecordImplicitNullCheck(instruction);
3152 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003153 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003154 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003155
3156 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
3157
3158 if (done.IsLinked()) {
3159 __ Bind(&done);
3160 }
3161
3162 if (slow_path != nullptr) {
3163 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01003164 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003165 }
3166}
3167
Alexandre Rames67555f72014-11-18 10:55:16 +00003168void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003169 RegisterSet caller_saves = RegisterSet::Empty();
3170 InvokeRuntimeCallingConvention calling_convention;
3171 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3172 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
3173 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00003174 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00003175 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00003176}
3177
3178void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01003179 BoundsCheckSlowPathARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003180 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003181 codegen_->AddSlowPath(slow_path);
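  // The unsigned HS comparison covers both failure modes with a single branch:
  // a negative index, reinterpreted as unsigned, is higher than any valid
  // array length.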
Alexandre Rames67555f72014-11-18 10:55:16 +00003182 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
3183 __ B(slow_path->GetEntryLabel(), hs);
3184}
3185
Alexandre Rames67555f72014-11-18 10:55:16 +00003186void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
3187 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003188 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexandre Rames67555f72014-11-18 10:55:16 +00003189 locations->SetInAt(0, Location::RequiresRegister());
3190 if (check->HasUses()) {
3191 locations->SetOut(Location::SameAsFirstInput());
3192 }
3193}
3194
3195void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
3196 // We assume the class is not null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01003197 SlowPathCodeARM64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(
Alexandre Rames67555f72014-11-18 10:55:16 +00003198 check->GetLoadClass(), check, check->GetDexPc(), true);
3199 codegen_->AddSlowPath(slow_path);
3200 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
3201}
3202
Roland Levillain1a653882016-03-18 18:05:57 +00003203static bool IsFloatingPointZeroConstant(HInstruction* inst) {
3204 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
3205 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
3206}
3207
3208void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
3209 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
3210 Location rhs_loc = instruction->GetLocations()->InAt(1);
3211 if (rhs_loc.IsConstant()) {
3212 // 0.0 is the only immediate that can be encoded directly in
3213 // an FCMP instruction.
3214 //
3215 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
3216 // specify that in a floating-point comparison, positive zero
3217 // and negative zero are considered equal, so we can use the
3218 // literal 0.0 for both cases here.
3219 //
3220 // Note however that some methods (Float.equals, Float.compare,
3221 // Float.compareTo, Double.equals, Double.compare,
3222 // Double.compareTo, Math.max, Math.min, StrictMath.max,
3223 // StrictMath.min) consider 0.0 to be (strictly) greater than
3224 // -0.0. So if we ever translate calls to these methods into a
3225 // HCompare instruction, we must handle the -0.0 case with
3226 // care here.
3227 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
3228 __ Fcmp(lhs_reg, 0.0);
3229 } else {
3230 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
3231 }
Roland Levillain7f63c522015-07-13 15:54:55 +00003232}
3233
Serban Constantinescu02164b32014-11-13 14:05:07 +00003234void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003235 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003236 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003237 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01003238 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003239 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003240 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003241 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003242 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003243 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003244 case DataType::Type::kInt32:
3245 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003246 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00003247 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00003248 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3249 break;
3250 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003251 case DataType::Type::kFloat32:
3252 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003253 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00003254 locations->SetInAt(1,
3255 IsFloatingPointZeroConstant(compare->InputAt(1))
3256 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
3257 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00003258 locations->SetOut(Location::RequiresRegister());
3259 break;
3260 }
3261 default:
3262 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3263 }
3264}
3265
3266void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003267 DataType::Type in_type = compare->InputAt(0)->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00003268
3269 // 0 if: left == right
3270 // 1 if: left > right
3271 // -1 if: left < right
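  //
  // For integer inputs this is materialized branch-free: Cset(ne) produces 1
  // whenever the operands differ, and Cneg(lt) turns that into -1 when
  // left < right (e.g. left == 5, right == 9 gives Cset -> 1, Cneg -> -1).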
3272 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003273 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003274 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003275 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003276 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003277 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003278 case DataType::Type::kInt32:
3279 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003280 Register result = OutputRegister(compare);
3281 Register left = InputRegisterAt(compare, 0);
3282 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003283 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08003284 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
3285 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00003286 break;
3287 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003288 case DataType::Type::kFloat32:
3289 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003290 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00003291 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003292 __ Cset(result, ne);
3293 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01003294 break;
3295 }
3296 default:
3297 LOG(FATAL) << "Unimplemented compare type " << in_type;
3298 }
3299}
3300
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003301void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003302 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00003303
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003304 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003305 locations->SetInAt(0, Location::RequiresFpuRegister());
3306 locations->SetInAt(1,
3307 IsFloatingPointZeroConstant(instruction->InputAt(1))
3308 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
3309 : Location::RequiresFpuRegister());
3310 } else {
3311 // Integer cases.
3312 locations->SetInAt(0, Location::RequiresRegister());
3313 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
3314 }
3315
David Brazdilb3e773e2016-01-26 11:28:37 +00003316 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00003317 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01003318 }
3319}
3320
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003321void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003322 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003323 return;
3324 }
3325
3326 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01003327 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00003328 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01003329
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003330 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00003331 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003332 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00003333 } else {
3334 // Integer cases.
3335 Register lhs = InputRegisterAt(instruction, 0);
3336 Operand rhs = InputOperandAt(instruction, 1);
3337 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003338 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00003339 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003340}
3341
3342#define FOR_EACH_CONDITION_INSTRUCTION(M) \
3343 M(Equal) \
3344 M(NotEqual) \
3345 M(LessThan) \
3346 M(LessThanOrEqual) \
3347 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07003348 M(GreaterThanOrEqual) \
3349 M(Below) \
3350 M(BelowOrEqual) \
3351 M(Above) \
3352 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01003353#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003354void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
3355void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01003356FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00003357#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01003358#undef FOR_EACH_CONDITION_INSTRUCTION
3359
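// Division by a power-of-two constant is emitted branch-free: for a negative
// dividend, |imm| - 1 is added first (via CSEL, or by adding the sign bit when
// |imm| == 2) so that the arithmetic shift right truncates toward zero as Java
// requires; the result is negated when imm is negative. For example, with
// imm == 8 and dividend == -20: temp = -20 + 7 = -13, and -13 >> 3 == -2,
// matching -20 / 8 == -2.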
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003360void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003361 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003362 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003363 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
3364
3365 Register out = OutputRegister(instruction);
3366 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01003367
3368 if (abs_imm == 2) {
3369 int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
3370 __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
3371 } else {
3372 UseScratchRegisterScope temps(GetVIXLAssembler());
3373 Register temp = temps.AcquireSameSizeAs(out);
3374 __ Add(temp, dividend, abs_imm - 1);
3375 __ Cmp(dividend, 0);
3376 __ Csel(out, temp, dividend, lt);
3377 }
3378
Zheng Xuc6667102015-05-15 16:08:45 +08003379 int ctz_imm = CTZ(abs_imm);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003380 if (imm > 0) {
3381 __ Asr(out, out, ctz_imm);
Zheng Xuc6667102015-05-15 16:08:45 +08003382 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003383 __ Neg(out, Operand(out, ASR, ctz_imm));
Zheng Xuc6667102015-05-15 16:08:45 +08003384 }
3385}
3386
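// Division or remainder by an arbitrary non-zero constant uses the usual
// signed magic-number scheme: multiply by a precomputed constant, keep the
// high half of the product, optionally add or subtract the dividend, shift
// right arithmetically, then add the sign bit so the quotient truncates
// toward zero; a final Msub recovers the remainder when needed. For example,
// 32-bit division by 3 uses magic 0x55555556 with shift 0: dividend -7 gives
// a high half of -3, and adding back the sign bit yields the quotient -2.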
3387void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3388 DCHECK(instruction->IsDiv() || instruction->IsRem());
3389
3390 LocationSummary* locations = instruction->GetLocations();
3391 Location second = locations->InAt(1);
3392 DCHECK(second.IsConstant());
3393
3394 Register out = OutputRegister(instruction);
3395 Register dividend = InputRegisterAt(instruction, 0);
3396 int64_t imm = Int64FromConstant(second.GetConstant());
3397
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003398 DataType::Type type = instruction->GetResultType();
3399 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Zheng Xuc6667102015-05-15 16:08:45 +08003400
3401 int64_t magic;
3402 int shift;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003403 CalculateMagicAndShiftForDivRem(
3404 imm, type == DataType::Type::kInt64 /* is_long */, &magic, &shift);
Zheng Xuc6667102015-05-15 16:08:45 +08003405
3406 UseScratchRegisterScope temps(GetVIXLAssembler());
3407 Register temp = temps.AcquireSameSizeAs(out);
3408
3409 // temp = get_high(dividend * magic)
3410 __ Mov(temp, magic);
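  // SMULH yields the high 64 bits of the 128-bit product directly; for 32-bit
  // operands, SMULL forms the full 64-bit product and LSR #32 extracts its
  // high half.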
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003411 if (type == DataType::Type::kInt64) {
Zheng Xuc6667102015-05-15 16:08:45 +08003412 __ Smulh(temp, dividend, temp);
3413 } else {
3414 __ Smull(temp.X(), dividend, temp);
3415 __ Lsr(temp.X(), temp.X(), 32);
3416 }
3417
3418 if (imm > 0 && magic < 0) {
3419 __ Add(temp, temp, dividend);
3420 } else if (imm < 0 && magic > 0) {
3421 __ Sub(temp, temp, dividend);
3422 }
3423
3424 if (shift != 0) {
3425 __ Asr(temp, temp, shift);
3426 }
3427
3428 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003429 __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003430 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003431 __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003432 // TODO: Strength reduction for msub.
3433 Register temp_imm = temps.AcquireSameSizeAs(out);
3434 __ Mov(temp_imm, imm);
3435 __ Msub(out, temp, temp_imm, dividend);
3436 }
3437}
3438
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003439void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003440 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Zheng Xuc6667102015-05-15 16:08:45 +08003441
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003442 if (imm == 0) {
3443 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3444 return;
3445 }
Zheng Xuc6667102015-05-15 16:08:45 +08003446
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003447 if (IsPowerOfTwo(AbsOrMin(imm))) {
3448 GenerateIntDivForPower2Denom(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003449 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003450 // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
3451 DCHECK(imm < -2 || imm > 2) << imm;
3452 GenerateDivRemWithAnyConstant(instruction);
3453 }
3454}
3455
3456void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv *instruction) {
3457 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
3458 << instruction->GetResultType();
3459
3460 if (instruction->GetLocations()->InAt(1).IsConstant()) {
3461 GenerateIntDivForConstDenom(instruction);
3462 } else {
3463 Register out = OutputRegister(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003464 Register dividend = InputRegisterAt(instruction, 0);
3465 Register divisor = InputRegisterAt(instruction, 1);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003466 __ Sdiv(out, dividend, divisor);
Zheng Xuc6667102015-05-15 16:08:45 +08003467 }
3468}
3469
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003470void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3471 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003472 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003473 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003474 case DataType::Type::kInt32:
3475 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003476 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003477 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003478 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3479 break;
3480
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003481 case DataType::Type::kFloat32:
3482 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003483 locations->SetInAt(0, Location::RequiresFpuRegister());
3484 locations->SetInAt(1, Location::RequiresFpuRegister());
3485 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3486 break;
3487
3488 default:
3489 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3490 }
3491}
3492
3493void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003494 DataType::Type type = div->GetResultType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003495 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003496 case DataType::Type::kInt32:
3497 case DataType::Type::kInt64:
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003498 GenerateIntDiv(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003499 break;
3500
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003501 case DataType::Type::kFloat32:
3502 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003503 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3504 break;
3505
3506 default:
3507 LOG(FATAL) << "Unexpected div type " << type;
3508 }
3509}
3510
Alexandre Rames67555f72014-11-18 10:55:16 +00003511void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003512 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003513 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003514}
3515
3516void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3517 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003518 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003519 codegen_->AddSlowPath(slow_path);
3520 Location value = instruction->GetLocations()->InAt(0);
3521
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003522 DataType::Type type = instruction->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +00003523
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003524 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003525 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003526 return;
3527 }
3528
Alexandre Rames67555f72014-11-18 10:55:16 +00003529 if (value.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003530 int64_t divisor = Int64FromLocation(value);
Alexandre Rames67555f72014-11-18 10:55:16 +00003531 if (divisor == 0) {
3532 __ B(slow_path->GetEntryLabel());
3533 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003534 // A division by a non-zero constant is valid. We don't need to perform
3535 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003536 }
3537 } else {
3538 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3539 }
3540}
3541
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003542void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3543 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003544 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003545 locations->SetOut(Location::ConstantLocation(constant));
3546}
3547
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003548void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3549 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003550 // Will be generated at use site.
3551}
3552
Alexandre Rames5319def2014-10-23 10:03:10 +01003553void LocationsBuilderARM64::VisitExit(HExit* exit) {
3554 exit->SetLocations(nullptr);
3555}
3556
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003557void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003558}
3559
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003560void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3561 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003562 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003563 locations->SetOut(Location::ConstantLocation(constant));
3564}
3565
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003566void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003567 // Will be generated at use site.
3568}
3569
David Brazdilfc6a86a2015-06-26 10:33:45 +00003570void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003571 if (successor->IsExitBlock()) {
3572 DCHECK(got->GetPrevious()->AlwaysThrows());
3573 return; // no code needed
3574 }
3575
Serban Constantinescu02164b32014-11-13 14:05:07 +00003576 HBasicBlock* block = got->GetBlock();
3577 HInstruction* previous = got->GetPrevious();
3578 HLoopInformation* info = block->GetLoopInformation();
3579
David Brazdil46e2a392015-03-16 17:31:52 +00003580 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00003581 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
3582 UseScratchRegisterScope temps(GetVIXLAssembler());
3583 Register temp1 = temps.AcquireX();
3584 Register temp2 = temps.AcquireX();
3585 __ Ldr(temp1, MemOperand(sp, 0));
3586 __ Ldrh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3587 __ Add(temp2, temp2, 1);
3588 __ Strh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3589 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003590 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3591 return;
3592 }
3593 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3594 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01003595 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003596 }
3597 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003598 __ B(codegen_->GetLabelOf(successor));
3599 }
3600}
3601
David Brazdilfc6a86a2015-06-26 10:33:45 +00003602void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3603 got->SetLocations(nullptr);
3604}
3605
3606void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3607 HandleGoto(got, got->GetSuccessor());
3608}
3609
3610void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3611 try_boundary->SetLocations(nullptr);
3612}
3613
3614void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3615 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3616 if (!successor->IsExitBlock()) {
3617 HandleGoto(try_boundary, successor);
3618 }
3619}
3620
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003621void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003622 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003623 vixl::aarch64::Label* true_target,
3624 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003625 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003626
David Brazdil0debae72015-11-12 18:37:00 +00003627 if (true_target == nullptr && false_target == nullptr) {
3628 // Nothing to do. The code always falls through.
3629 return;
3630 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003631 // Constant condition, statically compared against "true" (integer value 1).
3632 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003633 if (true_target != nullptr) {
3634 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003635 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003636 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003637 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003638 if (false_target != nullptr) {
3639 __ B(false_target);
3640 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003641 }
David Brazdil0debae72015-11-12 18:37:00 +00003642 return;
3643 }
3644
3645 // The following code generates these patterns:
3646 // (1) true_target == nullptr && false_target != nullptr
3647 // - opposite condition true => branch to false_target
3648 // (2) true_target != nullptr && false_target == nullptr
3649 // - condition true => branch to true_target
3650 // (3) true_target != nullptr && false_target != nullptr
3651 // - condition true => branch to true_target
3652 // - branch to false_target
3653 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003654 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003655 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003656 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003657 if (true_target == nullptr) {
3658 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3659 } else {
3660 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3661 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003662 } else {
3663 // The condition instruction has not been materialized, use its inputs as
3664 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003665 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003666
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003667 DataType::Type type = condition->InputAt(0)->GetType();
3668 if (DataType::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003669 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003670 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003671 IfCondition opposite_condition = condition->GetOppositeCondition();
3672 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003673 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003674 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003675 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003676 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003677 // Integer cases.
3678 Register lhs = InputRegisterAt(condition, 0);
3679 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003680
3681 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003682 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003683 if (true_target == nullptr) {
3684 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3685 non_fallthrough_target = false_target;
3686 } else {
3687 arm64_cond = ARM64Condition(condition->GetCondition());
3688 non_fallthrough_target = true_target;
3689 }
3690
Aart Bik086d27e2016-01-20 17:02:00 -08003691 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003692 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
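        // A comparison against zero folds into a single compare-and-branch
        // (CBZ/CBNZ) or, for lt/ge, a test of the sign bit (TBNZ/TBZ), so no
        // flag-setting CMP is needed.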
Roland Levillain7f63c522015-07-13 15:54:55 +00003693 switch (arm64_cond) {
3694 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003695 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003696 break;
3697 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003698 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003699 break;
3700 case lt:
3701 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003702 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003703 break;
3704 case ge:
3705 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003706 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003707 break;
3708 default:
3709 // Without the `static_cast` the compiler throws an error for
3710 // `-Werror=sign-promo`.
3711 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3712 }
3713 } else {
3714 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003715 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003716 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003717 }
3718 }
David Brazdil0debae72015-11-12 18:37:00 +00003719
3720 // If neither branch falls through (case 3), the conditional branch to `true_target`
3721 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3722 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003723 __ B(false_target);
3724 }
3725}
3726
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003727void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003728 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003729 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003730 locations->SetInAt(0, Location::RequiresRegister());
3731 }
3732}
3733
3734void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003735 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3736 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003737 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3738 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3739 true_target = nullptr;
3740 }
3741 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3742 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3743 false_target = nullptr;
3744 }
David Brazdil0debae72015-11-12 18:37:00 +00003745 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003746}
3747
3748void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003749 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003750 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01003751 InvokeRuntimeCallingConvention calling_convention;
3752 RegisterSet caller_saves = RegisterSet::Empty();
3753 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3754 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00003755 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003756 locations->SetInAt(0, Location::RequiresRegister());
3757 }
3758}
3759
3760void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003761 SlowPathCodeARM64* slow_path =
3762 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003763 GenerateTestAndBranch(deoptimize,
3764 /* condition_input_index */ 0,
3765 slow_path->GetEntryLabel(),
3766 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003767}
3768
Mingyao Yang063fc772016-08-02 11:02:54 -07003769void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003770 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07003771 LocationSummary(flag, LocationSummary::kNoCall);
3772 locations->SetOut(Location::RequiresRegister());
3773}
3774
3775void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3776 __ Ldr(OutputRegister(flag),
3777 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3778}
3779
David Brazdilc0b601b2016-02-08 14:20:45 +00003780static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3781 return condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003782 DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
David Brazdilc0b601b2016-02-08 14:20:45 +00003783}
3784
Alexandre Rames880f1192016-06-13 16:04:50 +01003785static inline Condition GetConditionForSelect(HCondition* condition) {
3786 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003787 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3788 : ARM64Condition(cond);
3789}
3790
David Brazdil74eb1b22015-12-14 11:44:01 +00003791void LocationsBuilderARM64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003792 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003793 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003794 locations->SetInAt(0, Location::RequiresFpuRegister());
3795 locations->SetInAt(1, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003796 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames880f1192016-06-13 16:04:50 +01003797 } else {
3798 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3799 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3800 bool is_true_value_constant = cst_true_value != nullptr;
3801 bool is_false_value_constant = cst_false_value != nullptr;
3802 // Ask VIXL whether we should synthesize constants in registers.
3803 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
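    // GetCselSynthesisInformation() reports whether each constant operand can
    // be synthesized by the Csel macro itself (typically small values such as
    // 0, 1 or -1, via CSEL/CSINC/CSINV/CSNEG) or must first be materialized in
    // a register.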
3804 Operand true_op = is_true_value_constant ?
3805 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3806 Operand false_op = is_false_value_constant ?
3807 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3808 bool true_value_in_register = false;
3809 bool false_value_in_register = false;
3810 MacroAssembler::GetCselSynthesisInformation(
3811 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3812 true_value_in_register |= !is_true_value_constant;
3813 false_value_in_register |= !is_false_value_constant;
3814
3815 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3816 : Location::ConstantLocation(cst_true_value));
3817 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3818 : Location::ConstantLocation(cst_false_value));
Donghui Bai426b49c2016-11-08 14:55:38 +08003819 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003820 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003821
David Brazdil74eb1b22015-12-14 11:44:01 +00003822 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3823 locations->SetInAt(2, Location::RequiresRegister());
3824 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003825}
3826
3827void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003828 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003829 Condition csel_cond;
3830
3831 if (IsBooleanValueOrMaterializedCondition(cond)) {
3832 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003833 // Use the condition flags set by the previous instruction.
3834 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003835 } else {
3836 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003837 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003838 }
3839 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003840 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003841 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003842 } else {
3843 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003844 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003845 }
3846
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003847 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003848 __ Fcsel(OutputFPRegister(select),
3849 InputFPRegisterAt(select, 1),
3850 InputFPRegisterAt(select, 0),
3851 csel_cond);
3852 } else {
3853 __ Csel(OutputRegister(select),
3854 InputOperandAt(select, 1),
3855 InputOperandAt(select, 0),
3856 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003857 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003858}
3859
David Srbecky0cf44932015-12-09 14:09:59 +00003860void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003861 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00003862}
3863
David Srbeckyd28f4a02016-03-14 17:14:24 +00003864void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3865 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003866}
3867
3868void CodeGeneratorARM64::GenerateNop() {
3869 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003870}
3871
Alexandre Rames5319def2014-10-23 10:03:10 +01003872void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00003873 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003874}
3875
3876void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003877 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003878}
3879
3880void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003881 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003882}
3883
3884void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003885 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003886}
3887
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003888// Temp is used for read barrier.
3889static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3890 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003891 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003892 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3893 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3894 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3895 return 1;
3896 }
3897 return 0;
3898}
3899
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003900// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003901// interface pointer, one for loading the current interface.
3902// The other checks have one temp for loading the object's class.
3903static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3904 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3905 return 3;
3906 }
3907 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003908}
3909
Alexandre Rames67555f72014-11-18 10:55:16 +00003910void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003911 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003912 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003913 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003914 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003915 case TypeCheckKind::kExactCheck:
3916 case TypeCheckKind::kAbstractClassCheck:
3917 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00003918 case TypeCheckKind::kArrayObjectCheck: {
3919 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
3920 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
3921 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003922 break;
Vladimir Marko87584542017-12-12 17:47:52 +00003923 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003924 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003925 case TypeCheckKind::kUnresolvedCheck:
3926 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003927 call_kind = LocationSummary::kCallOnSlowPath;
3928 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00003929 case TypeCheckKind::kBitstringCheck:
3930 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003931 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003932
Vladimir Markoca6fff82017-10-03 14:49:14 +01003933 LocationSummary* locations =
3934 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003935 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003936 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003937 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003938 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003939 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3940 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3941 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3942 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3943 } else {
3944 locations->SetInAt(1, Location::RequiresRegister());
3945 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003946 // The "out" register is used as a temporary, so it overlaps with the inputs.
3947 // Note that TypeCheckSlowPathARM64 uses this register too.
3948 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003949 // Add temps if necessary for read barriers.
3950 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003951}
3952
3953void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003954 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003955 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003956 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003957 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00003958 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
3959 ? Register()
3960 : InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003961 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003962 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003963 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3964 DCHECK_LE(num_temps, 1u);
3965 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003966 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3967 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3968 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3969 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003970
Scott Wakeling97c72b72016-06-24 16:19:36 +01003971 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003972 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003973
3974 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003975 // Avoid null check if we know `obj` is not null.
3976 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003977 __ Cbz(obj, &zero);
3978 }
3979
Roland Levillain44015862016-01-22 11:47:17 +00003980 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003981 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003982 ReadBarrierOption read_barrier_option =
3983 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003984 // /* HeapReference<Class> */ out = obj->klass_
3985 GenerateReferenceLoadTwoRegisters(instruction,
3986 out_loc,
3987 obj_loc,
3988 class_offset,
3989 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003990 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003991 __ Cmp(out, cls);
3992 __ Cset(out, eq);
3993 if (zero.IsLinked()) {
3994 __ B(&done);
3995 }
3996 break;
3997 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003998
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003999 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00004000 ReadBarrierOption read_barrier_option =
4001 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08004002 // /* HeapReference<Class> */ out = obj->klass_
4003 GenerateReferenceLoadTwoRegisters(instruction,
4004 out_loc,
4005 obj_loc,
4006 class_offset,
4007 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004008 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004009 // If the class is abstract, we eagerly fetch the super class of the
4010 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004011 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004012 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004013 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004014 GenerateReferenceLoadOneRegister(instruction,
4015 out_loc,
4016 super_offset,
4017 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004018 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004019 // If `out` is null, we use it for the result, and jump to `done`.
4020 __ Cbz(out, &done);
4021 __ Cmp(out, cls);
4022 __ B(ne, &loop);
4023 __ Mov(out, 1);
4024 if (zero.IsLinked()) {
4025 __ B(&done);
4026 }
4027 break;
4028 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004029
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004030 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00004031 ReadBarrierOption read_barrier_option =
4032 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08004033 // /* HeapReference<Class> */ out = obj->klass_
4034 GenerateReferenceLoadTwoRegisters(instruction,
4035 out_loc,
4036 obj_loc,
4037 class_offset,
4038 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004039 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004040 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004041 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004042 __ Bind(&loop);
4043 __ Cmp(out, cls);
4044 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004045 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004046 GenerateReferenceLoadOneRegister(instruction,
4047 out_loc,
4048 super_offset,
4049 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004050 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004051 __ Cbnz(out, &loop);
4052 // If `out` is null, we use it for the result, and jump to `done`.
4053 __ B(&done);
4054 __ Bind(&success);
4055 __ Mov(out, 1);
4056 if (zero.IsLinked()) {
4057 __ B(&done);
4058 }
4059 break;
4060 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004061
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004062 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00004063 ReadBarrierOption read_barrier_option =
4064 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08004065 // /* HeapReference<Class> */ out = obj->klass_
4066 GenerateReferenceLoadTwoRegisters(instruction,
4067 out_loc,
4068 obj_loc,
4069 class_offset,
4070 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004071 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004072 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004073 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004074 __ Cmp(out, cls);
4075 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004076 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004077 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004078 GenerateReferenceLoadOneRegister(instruction,
4079 out_loc,
4080 component_offset,
4081 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004082 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004083 // If `out` is null, we use it for the result, and jump to `done`.
4084 __ Cbz(out, &done);
4085 __ Ldrh(out, HeapOperand(out, primitive_offset));
4086 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
4087 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004088 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004089 __ Mov(out, 1);
4090 __ B(&done);
4091 break;
4092 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004093
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004094 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08004095 // No read barrier since the slow path will retry upon failure.
4096 // /* HeapReference<Class> */ out = obj->klass_
4097 GenerateReferenceLoadTwoRegisters(instruction,
4098 out_loc,
4099 obj_loc,
4100 class_offset,
4101 maybe_temp_loc,
4102 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004103 __ Cmp(out, cls);
4104 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01004105 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4106 instruction, /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004107 codegen_->AddSlowPath(slow_path);
4108 __ B(ne, slow_path->GetEntryLabel());
4109 __ Mov(out, 1);
4110 if (zero.IsLinked()) {
4111 __ B(&done);
4112 }
4113 break;
4114 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004115
Calin Juravle98893e12015-10-02 21:05:03 +01004116 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004117 case TypeCheckKind::kInterfaceCheck: {
4118 // Note that we indeed only call on slow path, but we always go
4119 // into the slow path for the unresolved and interface check
4120 // cases.
4121 //
4122 // We cannot directly call the InstanceofNonTrivial runtime
4123 // entry point without resorting to a type checking slow path
4124 // here (i.e. by calling InvokeRuntime directly), as it would
4125 // require assigning fixed registers for the inputs of this
4126 // HInstanceOf instruction (following the runtime calling
4127 // convention), which might be cluttered by the potential first
4128 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00004129 //
4130 // TODO: Introduce a new runtime entry point taking the object
4131 // to test (instead of its class) as argument, and let it deal
4132 // with the read barrier issues. This will let us refactor this
4133 // case of the `switch` code as it was previously (with a direct
4134 // call to the runtime not using a type checking slow path).
4135 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004136 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01004137 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4138 instruction, /* is_fatal */ false);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004139 codegen_->AddSlowPath(slow_path);
4140 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004141 if (zero.IsLinked()) {
4142 __ B(&done);
4143 }
4144 break;
4145 }
Vladimir Marko175e7862018-03-27 09:03:13 +00004146
4147 case TypeCheckKind::kBitstringCheck: {
4148 // /* HeapReference<Class> */ out = obj->klass_
4149 GenerateReferenceLoadTwoRegisters(instruction,
4150 out_loc,
4151 obj_loc,
4152 class_offset,
4153 maybe_temp_loc,
4154 kWithoutReadBarrier);
4155
4156 GenerateBitstringTypeCheckCompare(instruction, out);
4157 __ Cset(out, eq);
4158 if (zero.IsLinked()) {
4159 __ B(&done);
4160 }
4161 break;
4162 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004163 }
4164
4165 if (zero.IsLinked()) {
4166 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01004167 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004168 }
4169
4170 if (done.IsLinked()) {
4171 __ Bind(&done);
4172 }
4173
4174 if (slow_path != nullptr) {
4175 __ Bind(slow_path->GetExitLabel());
4176 }
4177}
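
// A rough, illustrative model of the fast paths emitted above, using
// hypothetical types rather than the real mirror::Class layout. kExactCheck
// is a single pointer compare; kClassHierarchyCheck compares and then walks
// super_class_ until a match or null; kAbstractClassCheck emits the same loop
// but starts at the super class, since an object's exact class can never be
// an abstract class; kArrayObjectCheck (target type Object[]) accepts an
// exact match or any array with a reference component type. A null `obj`,
// when it must be checked, short-circuits to 0 before any of this runs.
namespace instance_of_sketch {

struct SketchClass {
  const SketchClass* super_class;     // stands in for mirror::Class::super_class_
  const SketchClass* component_type;  // stands in for mirror::Class::component_type_
  unsigned short primitive_type;      // 0 (kPrimNot) means "reference type"
};

// kExactCheck: Cmp + Cset.
inline bool IsExactly(const SketchClass* klass, const SketchClass* cls) {
  return klass == cls;
}

// kClassHierarchyCheck / kAbstractClassCheck: the Cbz/Cbnz loops above.
inline bool IsSubtypeByHierarchy(const SketchClass* klass, const SketchClass* cls) {
  for (const SketchClass* k = klass; k != nullptr; k = k->super_class) {
    if (k == cls) {
      return true;   // Mov(out, 1)
    }
  }
  return false;      // `out` holds the null super class, i.e. 0
}

// kArrayObjectCheck: exact match, or an array whose component is a reference.
inline bool IsInstanceOfObjectArray(const SketchClass* klass,
                                    const SketchClass* object_array_cls) {
  if (klass == object_array_cls) {
    return true;     // the B(eq, &exact_check) path
  }
  const SketchClass* component = klass->component_type;
  return component != nullptr && component->primitive_type == 0u;
}

}  // namespace instance_of_sketch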
4178
4179void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004180 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00004181 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01004182 LocationSummary* locations =
4183 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004184 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00004185 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
4186 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
4187 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
4188 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
4189 } else {
4190 locations->SetInAt(1, Location::RequiresRegister());
4191 }
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004192 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
4193 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004194}
4195
4196void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00004197 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004198 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004199 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004200 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00004201 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
4202 ? Register()
4203 : InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004204 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
4205 DCHECK_GE(num_temps, 1u);
4206 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004207 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004208 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
4209 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004210 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004211 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4212 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4213 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
4214 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
4215 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
4216 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
4217 const uint32_t object_array_data_offset =
4218 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004219
Vladimir Marko87584542017-12-12 17:47:52 +00004220 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004221 SlowPathCodeARM64* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01004222 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4223 instruction, is_type_check_slow_path_fatal);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004224 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004225
Scott Wakeling97c72b72016-06-24 16:19:36 +01004226 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004227 // Avoid null check if we know obj is not null.
4228 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01004229 __ Cbz(obj, &done);
4230 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004231
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004232 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004233 case TypeCheckKind::kExactCheck:
4234 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004235 // /* HeapReference<Class> */ temp = obj->klass_
4236 GenerateReferenceLoadTwoRegisters(instruction,
4237 temp_loc,
4238 obj_loc,
4239 class_offset,
4240 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004241 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004242
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004243 __ Cmp(temp, cls);
4244 // Jump to slow path for throwing the exception or doing a
4245 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004246 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004247 break;
4248 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004249
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004250 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004251 // /* HeapReference<Class> */ temp = obj->klass_
4252 GenerateReferenceLoadTwoRegisters(instruction,
4253 temp_loc,
4254 obj_loc,
4255 class_offset,
4256 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004257 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004258
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004259 // If the class is abstract, we eagerly fetch the super class of the
4260 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004261 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004262 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004263 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004264 GenerateReferenceLoadOneRegister(instruction,
4265 temp_loc,
4266 super_offset,
4267 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004268 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004269
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004270 // If the class reference currently in `temp` is null, jump to the slow path to throw the
4271 // exception.
4272 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4273 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004274 __ Cmp(temp, cls);
4275 __ B(ne, &loop);
4276 break;
4277 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004278
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004279 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004280 // /* HeapReference<Class> */ temp = obj->klass_
4281 GenerateReferenceLoadTwoRegisters(instruction,
4282 temp_loc,
4283 obj_loc,
4284 class_offset,
4285 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004286 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004287
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004288 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004289 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004290 __ Bind(&loop);
4291 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004292 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004293
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004294 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004295 GenerateReferenceLoadOneRegister(instruction,
4296 temp_loc,
4297 super_offset,
4298 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004299 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004300
4301 // If the class reference currently in `temp` is not null, jump
4302 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004303 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004304 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004305 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004306 break;
4307 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004308
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004309 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004310 // /* HeapReference<Class> */ temp = obj->klass_
4311 GenerateReferenceLoadTwoRegisters(instruction,
4312 temp_loc,
4313 obj_loc,
4314 class_offset,
4315 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004316 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004317
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004318 // Do an exact check.
4319 __ Cmp(temp, cls);
4320 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004321
4322 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004323 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004324 GenerateReferenceLoadOneRegister(instruction,
4325 temp_loc,
4326 component_offset,
4327 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004328 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004329
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004330 // If the component type is null, jump to the slow path to throw the exception.
4331 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4332 // Otherwise, the object is indeed an array. Further check that this component type is not a
4333 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004334 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
4335 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004336 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004337 break;
4338 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004339
Calin Juravle98893e12015-10-02 21:05:03 +01004340 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004341 // We always go into the type check slow path for the unresolved check case.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004342 //
4343 // We cannot directly call the CheckCast runtime entry point
4344 // without resorting to a type checking slow path here (i.e. by
4345 // calling InvokeRuntime directly), as it would require
4346 // assigning fixed registers for the inputs of this HCheckCast
4347 // instruction (following the runtime calling convention), which
4348 // might be cluttered by the potential first read barrier
4349 // emission at the beginning of this method.
4350 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004351 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004352 case TypeCheckKind::kInterfaceCheck: {
4353 // /* HeapReference<Class> */ temp = obj->klass_
4354 GenerateReferenceLoadTwoRegisters(instruction,
4355 temp_loc,
4356 obj_loc,
4357 class_offset,
4358 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004359 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004360
4361 // /* HeapReference<Class> */ temp = temp->iftable_
4362 GenerateReferenceLoadTwoRegisters(instruction,
4363 temp_loc,
4364 temp_loc,
4365 iftable_offset,
4366 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004367 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08004368 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004369 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08004370 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004371 vixl::aarch64::Label start_loop;
4372 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004373 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004374 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
4375 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004376 // Go to next interface.
4377 __ Add(temp, temp, 2 * kHeapReferenceSize);
4378 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004379 // Compare the classes and continue the loop if they do not match.
4380 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
4381 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004382 break;
4383 }
Vladimir Marko175e7862018-03-27 09:03:13 +00004384
4385 case TypeCheckKind::kBitstringCheck: {
4386 // /* HeapReference<Class> */ temp = obj->klass_
4387 GenerateReferenceLoadTwoRegisters(instruction,
4388 temp_loc,
4389 obj_loc,
4390 class_offset,
4391 maybe_temp2_loc,
4392 kWithoutReadBarrier);
4393
4394 GenerateBitstringTypeCheckCompare(instruction, temp);
4395 __ B(ne, type_check_slow_path->GetEntryLabel());
4396 break;
4397 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004398 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00004399 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004400
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004401 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004402}
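
// A rough, illustrative model of the kInterfaceCheck scan above, with
// hypothetical types; the real IfTable is a flattened array in which each
// interface occupies two consecutive slots (the interface class and its
// method array). That layout is why the emitted loop advances `temp` by
// 2 * kHeapReferenceSize and decrements the remaining-slot counter by 2;
// exhausting the counter means the cast fails and the slow path throws.
namespace check_cast_sketch {

struct SketchIfTable {
  const void* const* slots;  // [iface0, methods0, iface1, methods1, ...]
  int length;                // number of slots, i.e. 2 * number of interfaces
};

inline bool ImplementsInterface(const SketchIfTable& iftable, const void* iface) {
  for (int i = 0; i < iftable.length; i += 2) {  // "Go to next interface."
    if (iftable.slots[i] == iface) {
      return true;                               // the B(ne) is not taken; fall out of the loop
    }
  }
  return false;                                  // Cbz(counter) branches to the slow path
}

}  // namespace check_cast_sketch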
4403
Alexandre Rames5319def2014-10-23 10:03:10 +01004404void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004405 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01004406 locations->SetOut(Location::ConstantLocation(constant));
4407}
4408
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004409void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004410 // Will be generated at use site.
4411}
4412
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004413void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004414 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004415 locations->SetOut(Location::ConstantLocation(constant));
4416}
4417
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004418void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004419 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004420}
4421
Calin Juravle175dc732015-08-25 15:42:32 +01004422void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4423 // The trampoline uses the same calling convention as dex calling conventions,
4424 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
4425 // the method_idx.
4426 HandleInvoke(invoke);
4427}
4428
4429void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4430 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004431 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle175dc732015-08-25 15:42:32 +01004432}
4433
Alexandre Rames5319def2014-10-23 10:03:10 +01004434void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01004435 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01004436 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01004437}
4438
Alexandre Rames67555f72014-11-18 10:55:16 +00004439void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4440 HandleInvoke(invoke);
4441}
4442
4443void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4444 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004445 LocationSummary* locations = invoke->GetLocations();
4446 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004447 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00004448 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004449 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00004450
4451 // The register ip1 is required to be used for the hidden argument in
4452 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004453 MacroAssembler* masm = GetVIXLAssembler();
4454 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004455 scratch_scope.Exclude(ip1);
4456 __ Mov(ip1, invoke->GetDexMethodIndex());
4457
Artem Serov914d7a82017-02-07 14:33:49 +00004458 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004459 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004460 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004461 {
4462 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4463 // /* HeapReference<Class> */ temp = temp->klass_
4464 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4465 codegen_->MaybeRecordImplicitNullCheck(invoke);
4466 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004467 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004468 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004469 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004470 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004471 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004472 }
Artem Serov914d7a82017-02-07 14:33:49 +00004473
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004474 // Instead of simply (possibly) unpoisoning `temp` here, we should
4475 // emit a read barrier for the previous class reference load.
4476 // However this is not required in practice, as this is an
4477 // intermediate/temporary reference and because the current
4478 // concurrent copying collector keeps the from-space memory
4479 // intact/accessible until the end of the marking phase (the
4480 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004481 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004482 __ Ldr(temp,
4483 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4484 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004485 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004486 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004487 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004488 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004489 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004490
4491 {
4492 // Ensure the pc position is recorded immediately after the `blr` instruction.
4493 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4494
4495 // lr();
4496 __ blr(lr);
4497 DCHECK(!codegen_->IsLeafMethod());
4498 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4499 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004500
4501 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004502}
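
// A rough, illustrative model of the interface dispatch above, with
// hypothetical types; the real tables are the ImTable of ArtMethod* hanging
// off mirror::Class. The hidden ip1 argument only matters when the selected
// slot holds art_quick_imt_conflict_trampoline, which uses that method index
// to resolve the real target.
namespace imt_dispatch_sketch {

using SketchEntryPoint = void (*)();

struct SketchArtMethod {
  SketchEntryPoint entry_point;       // entry_point_from_quick_compiled_code_
};

struct SketchClass {
  const SketchArtMethod* const* imt;  // loaded via mirror::Class::ImtPtrOffset()
};

struct SketchObject {
  const SketchClass* klass;           // loaded via mirror::Object::ClassOffset()
};

inline SketchEntryPoint ResolveInterfaceCall(const SketchObject* receiver, unsigned imt_index) {
  const SketchClass* klass = receiver->klass;             // Ldr temp.W, [receiver, class_offset]
  const SketchArtMethod* method = klass->imt[imt_index];  // Ldr ImtPtr, then the slot at method_offset
  return method->entry_point;                             // Ldr lr, [temp, entry_point]; blr lr
}

}  // namespace imt_dispatch_sketch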
4503
4504void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004505 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004506 if (intrinsic.TryDispatch(invoke)) {
4507 return;
4508 }
4509
Alexandre Rames67555f72014-11-18 10:55:16 +00004510 HandleInvoke(invoke);
4511}
4512
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004513void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004514 // Explicit clinit checks triggered by static invokes must have been pruned by
4515 // art::PrepareForRegisterAllocation.
4516 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004517
Vladimir Markoca6fff82017-10-03 14:49:14 +01004518 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004519 if (intrinsic.TryDispatch(invoke)) {
4520 return;
4521 }
4522
Alexandre Rames67555f72014-11-18 10:55:16 +00004523 HandleInvoke(invoke);
4524}
4525
Andreas Gampe878d58c2015-01-15 23:24:00 -08004526static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4527 if (invoke->GetLocations()->Intrinsified()) {
4528 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4529 intrinsic.Dispatch(invoke);
4530 return true;
4531 }
4532 return false;
4533}
4534
Vladimir Markodc151b22015-10-15 18:02:30 +01004535HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4536 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004537 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004538 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004539 return desired_dispatch_info;
4540}
4541
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004542void CodeGeneratorARM64::GenerateStaticOrDirectCall(
4543 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004544 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004545 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4546 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004547 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4548 uint32_t offset =
4549 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004550 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004551 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004552 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004553 }
Vladimir Marko58155012015-08-19 12:49:41 +00004554 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004555 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004556 break;
Vladimir Marko65979462017-05-19 17:25:12 +01004557 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
4558 DCHECK(GetCompilerOptions().IsBootImage());
4559 // Add ADRP with its PC-relative method patch.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004560 vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
Vladimir Marko65979462017-05-19 17:25:12 +01004561 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4562 // Add ADD with its PC-relative method patch.
4563 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004564 NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
Vladimir Marko65979462017-05-19 17:25:12 +01004565 EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
4566 break;
4567 }
Vladimir Markob066d432018-01-03 13:14:37 +00004568 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
4569 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004570 uint32_t boot_image_offset = GetBootImageOffset(invoke);
Vladimir Markob066d432018-01-03 13:14:37 +00004571 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
4572 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4573 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
4574 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
4575 // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
4576 EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
4577 break;
4578 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004579 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Vladimir Markob066d432018-01-03 13:14:37 +00004580 // Add ADRP with its PC-relative .bss entry patch.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004581 MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
4582 vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004583 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Markob066d432018-01-03 13:14:37 +00004584 // Add LDR with its PC-relative .bss entry patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004585 vixl::aarch64::Label* ldr_label =
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004586 NewMethodBssEntryPatch(target_method, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004587 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004588 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004589 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004590 case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
4591 // Load method address from literal pool.
4592 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
4593 break;
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004594 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
4595 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
4596 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko58155012015-08-19 12:49:41 +00004597 }
4598 }
4599
4600 switch (invoke->GetCodePtrLocation()) {
4601 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004602 {
4603 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
4604 ExactAssemblyScope eas(GetVIXLAssembler(),
4605 kInstructionSize,
4606 CodeBufferCheckScope::kExactSize);
4607 __ bl(&frame_entry_label_);
4608 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
4609 }
Vladimir Marko58155012015-08-19 12:49:41 +00004610 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004611 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4612 // LR = callee_method->entry_point_from_quick_compiled_code_;
4613 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004614 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004615 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004616 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004617 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004618 ExactAssemblyScope eas(GetVIXLAssembler(),
4619 kInstructionSize,
4620 CodeBufferCheckScope::kExactSize);
4621 // lr()
4622 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004623 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004624 }
Vladimir Marko58155012015-08-19 12:49:41 +00004625 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004626 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004627
Andreas Gampe878d58c2015-01-15 23:24:00 -08004628 DCHECK(!IsLeafMethod());
4629}
4630
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004631void CodeGeneratorARM64::GenerateVirtualCall(
4632 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004633 // Use the calling convention instead of the location of the receiver, as
4634 // intrinsics may have put the receiver in a different register. In the intrinsics
4635 // slow path, the arguments have been moved to the right place, so here we are
4636 // guaranteed that the receiver is the first register of the calling convention.
4637 InvokeDexCallingConvention calling_convention;
4638 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004639 Register temp = XRegisterFrom(temp_in);
4640 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4641 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4642 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004643 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004644
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004645 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004646
4647 {
4648 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
4649 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4650 // /* HeapReference<Class> */ temp = receiver->klass_
4651 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4652 MaybeRecordImplicitNullCheck(invoke);
4653 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004654 // Instead of simply (possibly) unpoisoning `temp` here, we should
4655 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004656 // However this is not required in practice, as this is an
 // intermediate/temporary reference and because the current
4657 // concurrent copying collector keeps the from-space memory
4658 // intact/accessible until the end of the marking phase (the
4659 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004660 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4661 // temp = temp->GetMethodAt(method_offset);
4662 __ Ldr(temp, MemOperand(temp, method_offset));
4663 // lr = temp->GetEntryPoint();
4664 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004665 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004666 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004667 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4668 // lr();
4669 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004670 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004671 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004672}
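
// The virtual-dispatch counterpart of the interface sketch earlier: no hidden
// ip1 argument, and the method comes straight from the embedded vtable slot
// selected by GetVTableIndex(); EmbeddedVTableEntryOffset() folds the indexing
// into a single load offset. Hypothetical types again.
namespace vtable_dispatch_sketch {

using SketchEntryPoint = void (*)();
struct SketchArtMethod { SketchEntryPoint entry_point; };
struct SketchClass { const SketchArtMethod* const* vtable; };
struct SketchObject { const SketchClass* klass; };

inline SketchEntryPoint ResolveVirtualCall(const SketchObject* receiver, unsigned vtable_index) {
  // klass_ -> vtable slot -> entry point; the emitted code then does `blr lr`.
  return receiver->klass->vtable[vtable_index]->entry_point;
}

}  // namespace vtable_dispatch_sketch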
4673
Orion Hodsonac141392017-01-13 11:53:47 +00004674void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4675 HandleInvoke(invoke);
4676}
4677
4678void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4679 codegen_->GenerateInvokePolymorphicCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004680 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Orion Hodsonac141392017-01-13 11:53:47 +00004681}
4682
Orion Hodson4c8e12e2018-05-18 08:33:20 +01004683void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4684 HandleInvoke(invoke);
4685}
4686
4687void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4688 codegen_->GenerateInvokeCustomCall(invoke);
4689 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
4690}
4691
Vladimir Marko6fd16062018-06-26 11:02:04 +01004692vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
4693 uint32_t intrinsic_data,
4694 vixl::aarch64::Label* adrp_label) {
4695 return NewPcRelativePatch(
4696 /* dex_file */ nullptr, intrinsic_data, adrp_label, &boot_image_intrinsic_patches_);
4697}
4698
Vladimir Markob066d432018-01-03 13:14:37 +00004699vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
4700 uint32_t boot_image_offset,
4701 vixl::aarch64::Label* adrp_label) {
4702 return NewPcRelativePatch(
4703 /* dex_file */ nullptr, boot_image_offset, adrp_label, &boot_image_method_patches_);
4704}
4705
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004706vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004707 MethodReference target_method,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004708 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004709 return NewPcRelativePatch(
4710 target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004711}
4712
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004713vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
4714 MethodReference target_method,
4715 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004716 return NewPcRelativePatch(
4717 target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004718}
4719
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004720vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
Scott Wakeling97c72b72016-06-24 16:19:36 +01004721 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004722 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004723 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004724 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004725}
4726
Vladimir Marko1998cd02017-01-13 13:02:58 +00004727vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4728 const DexFile& dex_file,
4729 dex::TypeIndex type_index,
4730 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004731 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004732}
4733
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004734vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004735 const DexFile& dex_file,
4736 dex::StringIndex string_index,
4737 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004738 return NewPcRelativePatch(
4739 &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01004740}
4741
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004742vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
4743 const DexFile& dex_file,
4744 dex::StringIndex string_index,
4745 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004746 return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004747}
4748
Vladimir Marko450f1d02018-04-25 17:27:45 +01004749void CodeGeneratorARM64::EmitBakerReadBarrierCbnz(uint32_t custom_data) {
4750 ExactAssemblyScope guard(GetVIXLAssembler(), 1 * vixl::aarch64::kInstructionSize);
4751 if (Runtime::Current()->UseJitCompilation()) {
4752 auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
4753 vixl::aarch64::Label* slow_path_entry = &it->second.label;
4754 __ cbnz(mr, slow_path_entry);
4755 } else {
4756 baker_read_barrier_patches_.emplace_back(custom_data);
4757 vixl::aarch64::Label* cbnz_label = &baker_read_barrier_patches_.back().label;
4758 __ bind(cbnz_label);
4759 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time.
4760 }
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004761}
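
// A rough model of what the cbnz above buys: `mr` mirrors the thread's
// is_gc_marking flag, so when no collection is marking, the barrier costs a
// single not-taken branch. The slow-path hook below is hypothetical; the real
// slow path is a thunk (or JIT slow path) selected by `custom_data`, and it
// also inspects the holder's lock word before deciding to mark.
namespace baker_barrier_sketch {

template <typename T>
inline T* LoadWithBakerBarrier(T* const* field,
                               bool is_gc_marking,   // the value kept in mr
                               T* (*mark)(T*)) {     // hypothetical slow-path hook
  T* ref = *field;             // the reference load being guarded
  if (is_gc_marking) {         // cbnz mr, <slow path entry>
    ref = mark(ref);           // slow path: mark / forward the reference if needed
  }
  return ref;
}

}  // namespace baker_barrier_sketch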
4762
Scott Wakeling97c72b72016-06-24 16:19:36 +01004763vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004764 const DexFile* dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004765 uint32_t offset_or_index,
4766 vixl::aarch64::Label* adrp_label,
4767 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004768 // Add a patch entry and return the label.
4769 patches->emplace_back(dex_file, offset_or_index);
4770 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004771 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004772 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4773 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4774 return label;
4775}
4776
Scott Wakeling97c72b72016-06-24 16:19:36 +01004777vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4778 uint64_t address) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004779 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004780}
4781
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004782vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004783 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004784 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004785 return jit_string_patches_.GetOrCreate(
4786 StringReference(&dex_file, string_index),
4787 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4788}
4789
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004790vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004791 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004792 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004793 return jit_class_patches_.GetOrCreate(
4794 TypeReference(&dex_file, type_index),
4795 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4796}
4797
Vladimir Markoaad75c62016-10-03 08:46:48 +00004798void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4799 vixl::aarch64::Register reg) {
4800 DCHECK(reg.IsX());
4801 SingleEmissionCheckScope guard(GetVIXLAssembler());
4802 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004803 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004804}
4805
4806void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4807 vixl::aarch64::Register out,
4808 vixl::aarch64::Register base) {
4809 DCHECK(out.IsX());
4810 DCHECK(base.IsX());
4811 SingleEmissionCheckScope guard(GetVIXLAssembler());
4812 __ Bind(fixup_label);
4813 __ add(out, base, Operand(/* offset placeholder */ 0));
4814}
4815
4816void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4817 vixl::aarch64::Register out,
4818 vixl::aarch64::Register base) {
4819 DCHECK(base.IsX());
4820 SingleEmissionCheckScope guard(GetVIXLAssembler());
4821 __ Bind(fixup_label);
4822 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4823}
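
// A rough sketch of the arithmetic behind these placeholders: ADRP forms the
// 4KiB page of a +/-4GiB PC-relative target and the following ADD or LDR
// supplies the low 12 bits. The linker (or JIT) later replaces the zero
// immediates with values computed roughly as below (hypothetical helper, not
// the actual patching code).
namespace adrp_sketch {

inline unsigned long long PageOf(unsigned long long address) {
  return address & ~0xfffull;  // 4KiB page containing `address`
}

struct AdrpImmediates {
  long long adrp_page_delta;   // signed page count encoded in the ADRP
  unsigned add_or_ldr_lo12;    // low 12 bits encoded in the ADD / LDR offset
};

inline AdrpImmediates ComputeImmediates(unsigned long long adrp_pc, unsigned long long target) {
  AdrpImmediates imms;
  imms.adrp_page_delta =
      (static_cast<long long>(PageOf(target)) - static_cast<long long>(PageOf(adrp_pc))) / 4096;
  imms.add_or_ldr_lo12 = static_cast<unsigned>(target & 0xfffu);
  return imms;
}

}  // namespace adrp_sketch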
4824
Vladimir Markoeebb8212018-06-05 14:57:24 +01004825void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01004826 uint32_t boot_image_reference) {
4827 if (GetCompilerOptions().IsBootImage()) {
4828 // Add ADRP with its PC-relative intrinsic patch.
4829 vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
4830 EmitAdrpPlaceholder(adrp_label, reg.X());
4831 // Add ADD with its PC-relative intrinsic patch.
4832 vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
4833 EmitAddPlaceholder(add_label, reg.X(), reg.X());
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004834 } else if (Runtime::Current()->IsAotCompiler()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01004835 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004836 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004837 EmitAdrpPlaceholder(adrp_label, reg.X());
4838 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004839 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004840 EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
4841 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004842 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01004843 gc::Heap* heap = Runtime::Current()->GetHeap();
4844 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004845 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01004846 __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
4847 }
4848}
4849
Vladimir Marko6fd16062018-06-26 11:02:04 +01004850void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
4851 uint32_t boot_image_offset) {
4852 DCHECK(invoke->IsStatic());
4853 InvokeRuntimeCallingConvention calling_convention;
4854 Register argument = calling_convention.GetRegisterAt(0);
4855 if (GetCompilerOptions().IsBootImage()) {
4856 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
4857 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
4858 MethodReference target_method = invoke->GetTargetMethod();
4859 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
4860 // Add ADRP with its PC-relative type patch.
4861 vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
4862 EmitAdrpPlaceholder(adrp_label, argument.X());
4863 // Add ADD with its PC-relative type patch.
4864 vixl::aarch64::Label* add_label =
4865 NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
4866 EmitAddPlaceholder(add_label, argument.X(), argument.X());
4867 } else {
4868 LoadBootImageAddress(argument, boot_image_offset);
4869 }
4870 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
4871 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
4872}
4873
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004874template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00004875inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4876 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004877 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004878 for (const PcRelativePatchInfo& info : infos) {
4879 linker_patches->push_back(Factory(info.label.GetLocation(),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004880 info.target_dex_file,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004881 info.pc_insn_label->GetLocation(),
4882 info.offset_or_index));
4883 }
4884}
4885
Vladimir Marko6fd16062018-06-26 11:02:04 +01004886template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
4887linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
4888 const DexFile* target_dex_file,
4889 uint32_t pc_insn_offset,
4890 uint32_t boot_image_offset) {
4891 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
4892 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00004893}
4894
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004895void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00004896 DCHECK(linker_patches->empty());
4897 size_t size =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004898 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004899 method_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004900 boot_image_type_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004901 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004902 boot_image_string_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004903 string_bss_entry_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01004904 boot_image_intrinsic_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004905 baker_read_barrier_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004906 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01004907 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004908 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004909 boot_image_method_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004910 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004911 boot_image_type_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004912 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004913 boot_image_string_patches_, linker_patches);
Vladimir Marko6fd16062018-06-26 11:02:04 +01004914 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
4915 boot_image_intrinsic_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01004916 } else {
Vladimir Marko6fd16062018-06-26 11:02:04 +01004917 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
Vladimir Markob066d432018-01-03 13:14:37 +00004918 boot_image_method_patches_, linker_patches);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004919 DCHECK(boot_image_type_patches_.empty());
4920 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004921 DCHECK(boot_image_intrinsic_patches_.empty());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004922 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004923 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
4924 method_bss_entry_patches_, linker_patches);
4925 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
4926 type_bss_entry_patches_, linker_patches);
4927 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
4928 string_bss_entry_patches_, linker_patches);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004929 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004930 linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
4931 info.label.GetLocation(), info.custom_data));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004932 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004933 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004934}
4935
Vladimir Markoca1e0382018-04-11 09:58:41 +00004936bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
4937 return patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
4938 patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
4939}
4940
4941void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
4942 /*out*/ ArenaVector<uint8_t>* code,
4943 /*out*/ std::string* debug_name) {
4944 Arm64Assembler assembler(GetGraph()->GetAllocator());
4945 switch (patch.GetType()) {
4946 case linker::LinkerPatch::Type::kCallRelative: {
4947 // The thunk just uses the entry point in the ArtMethod. This works even for calls
4948 // to the generic JNI and interpreter trampolines.
4949 Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
4950 kArm64PointerSize).Int32Value());
4951 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
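      // Roughly `ldr ip0, [x0, #offset]; br ip0` (the exact expansion of JumpTo is an
      // assumption here): x0 holds the callee ArtMethod*, so this tail-calls its entry point.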
4952 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
4953 *debug_name = "MethodCallThunk";
4954 }
4955 break;
4956 }
4957 case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
4958 DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
4959 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
4960 break;
4961 }
4962 default:
4963 LOG(FATAL) << "Unexpected patch type " << patch.GetType();
4964 UNREACHABLE();
4965 }
4966
4967 // Ensure we emit the literal pool if any.
4968 assembler.FinalizeCode();
4969 code->resize(assembler.CodeSize());
4970 MemoryRegion code_region(code->data(), code->size());
4971 assembler.FinalizeInstructions(code_region);
4972}
4973
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004974vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
4975 return uint32_literals_.GetOrCreate(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004976 value,
4977 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4978}
4979
Scott Wakeling97c72b72016-06-24 16:19:36 +01004980vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004981 return uint64_literals_.GetOrCreate(
4982 value,
4983 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004984}
4985
Andreas Gampe878d58c2015-01-15 23:24:00 -08004986void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004987 // Explicit clinit checks triggered by static invokes must have been pruned by
4988 // art::PrepareForRegisterAllocation.
4989 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004990
Andreas Gampe878d58c2015-01-15 23:24:00 -08004991 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004992 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004993 return;
4994 }
4995
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004996 {
4997 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4998 // are no pools emitted.
4999 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
5000 LocationSummary* locations = invoke->GetLocations();
5001 codegen_->GenerateStaticOrDirectCall(
5002 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
5003 }
5004
5005 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005006}
5007
5008void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08005009 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005010 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08005011 return;
5012 }
5013
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005014 {
5015 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
5016 // are no pools emitted.
5017 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
5018 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
5019 DCHECK(!codegen_->IsLeafMethod());
5020 }
5021
5022 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005023}
5024
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005025HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
5026 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005027 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005028 case HLoadClass::LoadKind::kInvalid:
5029 LOG(FATAL) << "UNREACHABLE";
5030 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005031 case HLoadClass::LoadKind::kReferrersClass:
5032 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005033 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005034 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005035 case HLoadClass::LoadKind::kBssEntry:
5036 DCHECK(!Runtime::Current()->UseJitCompilation());
5037 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005038 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005039 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005040 DCHECK(Runtime::Current()->UseJitCompilation());
5041 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005042 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005043 break;
5044 }
5045 return desired_class_load_kind;
5046}
5047
Alexandre Rames67555f72014-11-18 10:55:16 +00005048void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00005049 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005050 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005051 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00005052 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005053 cls,
5054 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00005055 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00005056 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005057 return;
5058 }
Vladimir Marko41559982017-01-06 14:04:23 +00005059 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005060
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005061 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5062 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005063 ? LocationSummary::kCallOnSlowPath
5064 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005065 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005066 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005067 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005068 }
5069
Vladimir Marko41559982017-01-06 14:04:23 +00005070 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005071 locations->SetInAt(0, Location::RequiresRegister());
5072 }
5073 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00005074 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
5075 if (!kUseReadBarrier || kUseBakerReadBarrier) {
5076 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Markoea4c1262017-02-06 19:59:33 +00005077 RegisterSet caller_saves = RegisterSet::Empty();
5078 InvokeRuntimeCallingConvention calling_convention;
5079 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
5080 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005081 RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
5082 DataType::Type::kReference).GetCode());
Vladimir Markoea4c1262017-02-06 19:59:33 +00005083 locations->SetCustomSlowPathCallerSaves(caller_saves);
5084 } else {
5085 // For non-Baker read barrier we have a temp-clobbering call.
5086 }
5087 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005088}
5089
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005090// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5091// move.
5092void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00005093 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005094 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00005095 codegen_->GenerateLoadClassRuntimeCall(cls);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005096 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle580b6092015-10-06 17:35:58 +01005097 return;
5098 }
Vladimir Marko41559982017-01-06 14:04:23 +00005099 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01005100
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005101 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01005102 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00005103
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005104 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
5105 ? kWithoutReadBarrier
5106 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005107 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00005108 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005109 case HLoadClass::LoadKind::kReferrersClass: {
5110 DCHECK(!cls->CanCallRuntime());
5111 DCHECK(!cls->MustGenerateClinitCheck());
5112 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5113 Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00005114 codegen_->GenerateGcRootFieldLoad(cls,
5115 out_loc,
5116 current_method,
5117 ArtMethod::DeclaringClassOffset().Int32Value(),
5118 /* fixup_label */ nullptr,
5119 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005120 break;
5121 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005122 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005123 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005124 // Add ADRP with its PC-relative type patch.
5125 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08005126 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005127 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005128 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005129 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005130 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005131 codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005132 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005133 break;
5134 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005135 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005136 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005137 uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
5138 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
5139 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005140 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005141 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005142 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005143 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005144 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005145 break;
5146 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005147 case HLoadClass::LoadKind::kBssEntry: {
5148 // Add ADRP with its PC-relative Class .bss entry patch.
5149 const DexFile& dex_file = cls->GetDexFile();
5150 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markof3c52b42017-11-17 17:32:12 +00005151 vixl::aarch64::Register temp = XRegisterFrom(out_loc);
5152 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
5153 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005154 // Add LDR with its PC-relative Class .bss entry patch.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005155 vixl::aarch64::Label* ldr_label =
Vladimir Markof3c52b42017-11-17 17:32:12 +00005156 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005157 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00005158 codegen_->GenerateGcRootFieldLoad(cls,
5159 out_loc,
5160 temp,
5161 /* offset placeholder */ 0u,
5162 ldr_label,
5163 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005164 generate_null_check = true;
5165 break;
5166 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005167 case HLoadClass::LoadKind::kJitBootImageAddress: {
5168 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
5169 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
5170 DCHECK_NE(address, 0u);
5171 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
5172 break;
5173 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005174 case HLoadClass::LoadKind::kJitTableAddress: {
5175 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
5176 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005177 cls->GetClass()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00005178 codegen_->GenerateGcRootFieldLoad(cls,
5179 out_loc,
5180 out.X(),
5181 /* offset */ 0,
5182 /* fixup_label */ nullptr,
5183 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005184 break;
5185 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005186 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005187 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00005188 LOG(FATAL) << "UNREACHABLE";
5189 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005190 }
5191
Vladimir Markoea4c1262017-02-06 19:59:33 +00005192 bool do_clinit = cls->MustGenerateClinitCheck();
5193 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005194 DCHECK(cls->CanCallRuntime());
Vladimir Marko174b2e22017-10-12 13:34:49 +01005195 SlowPathCodeARM64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(
Vladimir Markof3c52b42017-11-17 17:32:12 +00005196 cls, cls, cls->GetDexPc(), do_clinit);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005197 codegen_->AddSlowPath(slow_path);
5198 if (generate_null_check) {
5199 __ Cbz(out, slow_path->GetEntryLabel());
5200 }
5201 if (cls->MustGenerateClinitCheck()) {
5202 GenerateClassInitializationCheck(slow_path, out);
5203 } else {
5204 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00005205 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005206 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005207 }
5208}
5209
Orion Hodsondbaa5c72018-05-10 08:22:46 +01005210void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5211 InvokeRuntimeCallingConvention calling_convention;
5212 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5213 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
5214}
5215
5216void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5217 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
5218}
5219
Orion Hodson18259d72018-04-12 11:18:23 +01005220void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
5221 InvokeRuntimeCallingConvention calling_convention;
5222 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5223 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
5224}
5225
5226void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
5227 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
5228}
5229
David Brazdilcb1c0552015-08-04 16:22:25 +01005230static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005231 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01005232}
5233
Alexandre Rames67555f72014-11-18 10:55:16 +00005234void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
5235 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005236 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexandre Rames67555f72014-11-18 10:55:16 +00005237 locations->SetOut(Location::RequiresRegister());
5238}
5239
5240void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005241 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
5242}
5243
5244void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005245 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01005246}
5247
5248void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5249 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00005250}
5251
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005252HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
5253 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005254 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005255 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005256 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00005257 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005258 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005259 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005260 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005261 case HLoadString::LoadKind::kJitTableAddress:
5262 DCHECK(Runtime::Current()->UseJitCompilation());
5263 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005264 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005265 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005266 }
5267 return desired_string_load_kind;
5268}
5269
Alexandre Rames67555f72014-11-18 10:55:16 +00005270void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005271 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01005272 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005273 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005274 InvokeRuntimeCallingConvention calling_convention;
5275 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
5276 } else {
5277 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005278 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5279 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00005280 // Rely on the pResolveString and marking to save everything we need.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005281 RegisterSet caller_saves = RegisterSet::Empty();
5282 InvokeRuntimeCallingConvention calling_convention;
5283 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
5284 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005285 RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
5286 DataType::Type::kReference).GetCode());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005287 locations->SetCustomSlowPathCallerSaves(caller_saves);
5288 } else {
5289 // For non-Baker read barrier we have a temp-clobbering call.
5290 }
5291 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005292 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005293}
5294
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005295// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5296// move.
5297void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00005298 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005299 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005300
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005301 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005302 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005303 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005304 // Add ADRP with its PC-relative String patch.
5305 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005306 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005307 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005308 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005309 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005310 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005311 codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005312 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005313 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005314 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005315 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005316 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005317 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
5318 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
5319 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005320 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005321 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005322 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005323 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005324 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
5325 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005326 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00005327 case HLoadString::LoadKind::kBssEntry: {
5328 // Add ADRP with its PC-relative String .bss entry patch.
5329 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005330 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00005331 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markof3c52b42017-11-17 17:32:12 +00005332 Register temp = XRegisterFrom(out_loc);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005333 vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005334 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005335 // Add LDR with its PC-relative String .bss entry patch.
Vladimir Markoaad75c62016-10-03 08:46:48 +00005336 vixl::aarch64::Label* ldr_label =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005337 codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005338 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00005339 codegen_->GenerateGcRootFieldLoad(load,
5340 out_loc,
5341 temp,
5342 /* offset placeholder */ 0u,
5343 ldr_label,
5344 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005345 SlowPathCodeARM64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00005346 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005347 codegen_->AddSlowPath(slow_path);
5348 __ Cbz(out.X(), slow_path->GetEntryLabel());
5349 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005350 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005351 return;
5352 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005353 case HLoadString::LoadKind::kJitBootImageAddress: {
5354 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
5355 DCHECK_NE(address, 0u);
5356 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
5357 return;
5358 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005359 case HLoadString::LoadKind::kJitTableAddress: {
5360 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005361 load->GetStringIndex(),
5362 load->GetString()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00005363 codegen_->GenerateGcRootFieldLoad(load,
5364 out_loc,
5365 out.X(),
5366 /* offset */ 0,
5367 /* fixup_label */ nullptr,
5368 kCompilerReadBarrierOption);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005369 return;
5370 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005371 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005372 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005373 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005374
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005375 // TODO: Re-add the compiler code to do string dex cache lookup again.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005376 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005377 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005378 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005379 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
5380 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005381 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005382}
5383
Alexandre Rames5319def2014-10-23 10:03:10 +01005384void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005385 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01005386 locations->SetOut(Location::ConstantLocation(constant));
5387}
5388
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005389void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005390 // Will be generated at use site.
5391}
5392
Alexandre Rames67555f72014-11-18 10:55:16 +00005393void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005394 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5395 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005396 InvokeRuntimeCallingConvention calling_convention;
5397 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5398}
5399
5400void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01005401 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005402 instruction,
5403 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005404 if (instruction->IsEnter()) {
5405 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5406 } else {
5407 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5408 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005409 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005410}
5411
Alexandre Rames42d641b2014-10-27 14:00:51 +00005412void LocationsBuilderARM64::VisitMul(HMul* mul) {
5413 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005414 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005415 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005416 case DataType::Type::kInt32:
5417 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005418 locations->SetInAt(0, Location::RequiresRegister());
5419 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005420 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005421 break;
5422
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005423 case DataType::Type::kFloat32:
5424 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005425 locations->SetInAt(0, Location::RequiresFpuRegister());
5426 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00005427 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005428 break;
5429
5430 default:
5431 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5432 }
5433}
5434
5435void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
5436 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005437 case DataType::Type::kInt32:
5438 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005439 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
5440 break;
5441
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005442 case DataType::Type::kFloat32:
5443 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005444 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00005445 break;
5446
5447 default:
5448 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5449 }
5450}
5451
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005452void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
5453 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005454 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005455 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005456 case DataType::Type::kInt32:
5457 case DataType::Type::kInt64:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00005458 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00005459 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005460 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005461
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005462 case DataType::Type::kFloat32:
5463 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005464 locations->SetInAt(0, Location::RequiresFpuRegister());
5465 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005466 break;
5467
5468 default:
5469 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5470 }
5471}
5472
5473void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
5474 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005475 case DataType::Type::kInt32:
5476 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005477 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
5478 break;
5479
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005480 case DataType::Type::kFloat32:
5481 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005482 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005483 break;
5484
5485 default:
5486 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5487 }
5488}
5489
5490void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005491 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5492 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005493 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005494 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005495 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5496 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005497}
5498
5499void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005500  // Note: if heap poisoning is enabled, the entry point takes care
5501  // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00005502 QuickEntrypointEnum entrypoint =
5503 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
5504 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005505 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005506 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005507}
5508
Alexandre Rames5319def2014-10-23 10:03:10 +01005509void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005510 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5511 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01005512 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07005513 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005514 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexandre Rames5319def2014-10-23 10:03:10 +01005515}
5516
5517void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005518 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5519 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005520 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005521}
5522
5523void LocationsBuilderARM64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005524 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005525 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005526 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005527}
5528
5529void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005530 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005531 case DataType::Type::kInt32:
5532 case DataType::Type::kInt64:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005533 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005534 break;
5535
5536 default:
5537 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5538 }
5539}
5540
David Brazdil66d126e2015-04-03 16:02:44 +01005541void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005542 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
David Brazdil66d126e2015-04-03 16:02:44 +01005543 locations->SetInAt(0, Location::RequiresRegister());
5544 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5545}
5546
5547void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
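  // Booleans are materialized as 0 or 1, so XOR-ing bit 0 with 1 yields the logical negation.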
Scott Wakeling97c72b72016-06-24 16:19:36 +01005548 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005549}
5550
Alexandre Rames5319def2014-10-23 10:03:10 +01005551void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005552 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5553 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005554}
5555
Calin Juravle2ae48182016-03-16 14:05:09 +00005556void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5557 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005558 return;
5559 }
Artem Serov914d7a82017-02-07 14:33:49 +00005560 {
5561 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
5562 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5563 Location obj = instruction->GetLocations()->InAt(0);
5564 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5565 RecordPcInfo(instruction, instruction->GetDexPc());
5566 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005567}
5568
Calin Juravle2ae48182016-03-16 14:05:09 +00005569void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005570 SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005571 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005572
5573 LocationSummary* locations = instruction->GetLocations();
5574 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005575
5576 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005577}
5578
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005579void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005580 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005581}
5582
Alexandre Rames67555f72014-11-18 10:55:16 +00005583void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5584 HandleBinaryOp(instruction);
5585}
5586
5587void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5588 HandleBinaryOp(instruction);
5589}
5590
Alexandre Rames3e69f162014-12-10 10:36:50 +00005591void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5592 LOG(FATAL) << "Unreachable";
5593}
5594
5595void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005596 if (instruction->GetNext()->IsSuspendCheck() &&
5597 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5598 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5599 // The back edge will generate the suspend check.
5600 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5601 }
5602
Alexandre Rames3e69f162014-12-10 10:36:50 +00005603 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5604}
5605
Alexandre Rames5319def2014-10-23 10:03:10 +01005606void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005607 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005608 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5609 if (location.IsStackSlot()) {
5610 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5611 } else if (location.IsDoubleStackSlot()) {
5612 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5613 }
5614 locations->SetOut(location);
5615}
5616
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005617void InstructionCodeGeneratorARM64::VisitParameterValue(
5618 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005619 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005620}
5621
5622void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5623 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005624 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005625 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005626}
5627
5628void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5629 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5630 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005631}
5632
5633void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005634 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005635 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005636 locations->SetInAt(i, Location::Any());
5637 }
5638 locations->SetOut(Location::Any());
5639}
5640
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005641void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005642 LOG(FATAL) << "Unreachable";
5643}
5644
Serban Constantinescu02164b32014-11-13 14:05:07 +00005645void LocationsBuilderARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005646 DataType::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005647 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005648 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005649 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005650 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005651
5652 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005653 case DataType::Type::kInt32:
5654 case DataType::Type::kInt64:
Serban Constantinescu02164b32014-11-13 14:05:07 +00005655 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005656 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005657 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5658 break;
5659
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005660 case DataType::Type::kFloat32:
5661 case DataType::Type::kFloat64: {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005662 InvokeRuntimeCallingConvention calling_convention;
5663 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5664 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5665 locations->SetOut(calling_convention.GetReturnLocation(type));
5666
5667 break;
5668 }
5669
Serban Constantinescu02164b32014-11-13 14:05:07 +00005670 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005671 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005672 }
5673}
5674
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005675void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005676 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005677 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
5678 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
5679
5680 Register out = OutputRegister(instruction);
5681 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005682
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005683 if (abs_imm == 2) {
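    // dividend % +/-2: take the low bit, then negate it for negative dividends
    // (Csneg keeps `out` when dividend >= 0 and produces -out otherwise).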
5684 __ Cmp(dividend, 0);
5685 __ And(out, dividend, 1);
5686 __ Csneg(out, out, out, ge);
5687 } else {
5688 UseScratchRegisterScope temps(GetVIXLAssembler());
5689 Register temp = temps.AcquireSameSizeAs(out);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005690
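    // General power-of-two case: mask the dividend and its negation with (abs_imm - 1) and
    // pick the correctly signed result. Illustrative worked example (not from the source):
    // imm = 8, dividend = -13. Negs yields temp = 13 and sets flags on that positive value,
    // the masks give out = 3 and temp = 5, and since `mi` fails Csneg selects -temp = -5,
    // which is -13 % 8.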
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005691 __ Negs(temp, dividend);
5692 __ And(out, dividend, abs_imm - 1);
5693 __ And(temp, temp, abs_imm - 1);
5694 __ Csneg(out, out, temp, mi);
5695 }
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005696}
5697
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005698void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005699 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005700
5701 if (imm == 0) {
5702 // Do not generate anything.
5703 // DivZeroCheck would prevent any code to be executed.
5704 return;
5705 }
5706
Evgeny Astigeevichf58dc652018-06-25 17:54:07 +01005707 if (IsPowerOfTwo(AbsOrMin(imm))) {
5708 // Cases imm == -1 or imm == 1 are handled in constant folding by
5709 // InstructionWithAbsorbingInputSimplifier.
5710    // If these cases survive until code generation, they are handled in
5711    // GenerateIntRemForPower2Denom because -1 and 1 are powers of 2 (2^0).
5712    // Correct code is generated for them, just with more instructions.
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005713 GenerateIntRemForPower2Denom(instruction);
5714 } else {
5715 DCHECK(imm < -2 || imm > 2) << imm;
5716 GenerateDivRemWithAnyConstant(instruction);
5717 }
5718}
5719
5720void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
5721 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
5722 << instruction->GetResultType();
5723
5724 if (instruction->GetLocations()->InAt(1).IsConstant()) {
5725 GenerateIntRemForConstDenom(instruction);
5726 } else {
5727 Register out = OutputRegister(instruction);
5728 Register dividend = InputRegisterAt(instruction, 0);
5729 Register divisor = InputRegisterAt(instruction, 1);
5730 UseScratchRegisterScope temps(GetVIXLAssembler());
5731 Register temp = temps.AcquireSameSizeAs(out);
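    // out = dividend - (dividend / divisor) * divisor: Sdiv computes the quotient and Msub
    // folds the multiply-and-subtract into a single instruction.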
5732 __ Sdiv(temp, dividend, divisor);
5733 __ Msub(out, temp, divisor, dividend);
5734 }
5735}
5736
Serban Constantinescu02164b32014-11-13 14:05:07 +00005737void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005738 DataType::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005739
Serban Constantinescu02164b32014-11-13 14:05:07 +00005740 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005741 case DataType::Type::kInt32:
5742 case DataType::Type::kInt64: {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005743 GenerateIntRem(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005744 break;
5745 }
5746
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005747 case DataType::Type::kFloat32:
5748 case DataType::Type::kFloat64: {
5749 QuickEntrypointEnum entrypoint =
5750 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005751 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005752 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00005753 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5754 } else {
5755 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5756 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005757 break;
5758 }
5759
Serban Constantinescu02164b32014-11-13 14:05:07 +00005760 default:
5761 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005762 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005763 }
5764}
5765
Aart Bik1f8d51b2018-02-15 10:42:37 -08005766void LocationsBuilderARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005767 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005768}
5769
Aart Bik1f8d51b2018-02-15 10:42:37 -08005770void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005771 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005772}
5773
5774void LocationsBuilderARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005775 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005776}
5777
5778void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005779 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005780}
5781
Aart Bik3dad3412018-02-28 12:01:46 -08005782void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
5783 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
5784 switch (abs->GetResultType()) {
5785 case DataType::Type::kInt32:
5786 case DataType::Type::kInt64:
5787 locations->SetInAt(0, Location::RequiresRegister());
5788 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5789 break;
5790 case DataType::Type::kFloat32:
5791 case DataType::Type::kFloat64:
5792 locations->SetInAt(0, Location::RequiresFpuRegister());
5793 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5794 break;
5795 default:
5796 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5797 }
5798}
5799
5800void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
5801 switch (abs->GetResultType()) {
5802 case DataType::Type::kInt32:
5803 case DataType::Type::kInt64: {
5804 Register in_reg = InputRegisterAt(abs, 0);
5805 Register out_reg = OutputRegister(abs);
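      // out = (in < 0) ? -in : in. For example, in = -5 sets 'lt', so Cneg yields 5;
      // note that abs(INT_MIN) wraps back to INT_MIN, matching Java's Math.abs semantics.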
5806 __ Cmp(in_reg, Operand(0));
5807 __ Cneg(out_reg, in_reg, lt);
5808 break;
5809 }
5810 case DataType::Type::kFloat32:
5811 case DataType::Type::kFloat64: {
5812 FPRegister in_reg = InputFPRegisterAt(abs, 0);
5813 FPRegister out_reg = OutputFPRegister(abs);
5814 __ Fabs(out_reg, in_reg);
5815 break;
5816 }
5817 default:
5818 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5819 }
5820}
5821
Igor Murashkind01745e2017-04-05 16:40:31 -07005822void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
5823 constructor_fence->SetLocations(nullptr);
5824}
5825
5826void InstructionCodeGeneratorARM64::VisitConstructorFence(
5827 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
5828 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
5829}
5830
Calin Juravle27df7582015-04-17 19:12:31 +01005831void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5832 memory_barrier->SetLocations(nullptr);
5833}
5834
5835void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005836 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005837}
5838
Alexandre Rames5319def2014-10-23 10:03:10 +01005839void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005840 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005841 DataType::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005842 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005843}
5844
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005845void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005846 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005847}
5848
5849void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5850 instruction->SetLocations(nullptr);
5851}
5852
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005853void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005854 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005855}
5856
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005857void LocationsBuilderARM64::VisitRor(HRor* ror) {
5858 HandleBinaryOp(ror);
5859}
5860
5861void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5862 HandleBinaryOp(ror);
5863}
5864
Serban Constantinescu02164b32014-11-13 14:05:07 +00005865void LocationsBuilderARM64::VisitShl(HShl* shl) {
5866 HandleShift(shl);
5867}
5868
5869void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5870 HandleShift(shl);
5871}
5872
5873void LocationsBuilderARM64::VisitShr(HShr* shr) {
5874 HandleShift(shr);
5875}
5876
5877void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5878 HandleShift(shr);
5879}
5880
Alexandre Rames5319def2014-10-23 10:03:10 +01005881void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005882 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005883}
5884
5885void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005886 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005887}
5888
Alexandre Rames67555f72014-11-18 10:55:16 +00005889void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005890 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005891}
5892
5893void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005894 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005895}
5896
5897void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005898 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005899}
5900
Alexandre Rames67555f72014-11-18 10:55:16 +00005901void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005902 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005903}
5904
Calin Juravlee460d1d2015-09-29 04:52:17 +01005905void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5906 HUnresolvedInstanceFieldGet* instruction) {
5907 FieldAccessCallingConventionARM64 calling_convention;
5908 codegen_->CreateUnresolvedFieldLocationSummary(
5909 instruction, instruction->GetFieldType(), calling_convention);
5910}
5911
5912void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5913 HUnresolvedInstanceFieldGet* instruction) {
5914 FieldAccessCallingConventionARM64 calling_convention;
5915 codegen_->GenerateUnresolvedFieldAccess(instruction,
5916 instruction->GetFieldType(),
5917 instruction->GetFieldIndex(),
5918 instruction->GetDexPc(),
5919 calling_convention);
5920}
5921
5922void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5923 HUnresolvedInstanceFieldSet* instruction) {
5924 FieldAccessCallingConventionARM64 calling_convention;
5925 codegen_->CreateUnresolvedFieldLocationSummary(
5926 instruction, instruction->GetFieldType(), calling_convention);
5927}
5928
5929void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5930 HUnresolvedInstanceFieldSet* instruction) {
5931 FieldAccessCallingConventionARM64 calling_convention;
5932 codegen_->GenerateUnresolvedFieldAccess(instruction,
5933 instruction->GetFieldType(),
5934 instruction->GetFieldIndex(),
5935 instruction->GetDexPc(),
5936 calling_convention);
5937}
5938
5939void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5940 HUnresolvedStaticFieldGet* instruction) {
5941 FieldAccessCallingConventionARM64 calling_convention;
5942 codegen_->CreateUnresolvedFieldLocationSummary(
5943 instruction, instruction->GetFieldType(), calling_convention);
5944}
5945
5946void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5947 HUnresolvedStaticFieldGet* instruction) {
5948 FieldAccessCallingConventionARM64 calling_convention;
5949 codegen_->GenerateUnresolvedFieldAccess(instruction,
5950 instruction->GetFieldType(),
5951 instruction->GetFieldIndex(),
5952 instruction->GetDexPc(),
5953 calling_convention);
5954}
5955
5956void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5957 HUnresolvedStaticFieldSet* instruction) {
5958 FieldAccessCallingConventionARM64 calling_convention;
5959 codegen_->CreateUnresolvedFieldLocationSummary(
5960 instruction, instruction->GetFieldType(), calling_convention);
5961}
5962
5963void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5964 HUnresolvedStaticFieldSet* instruction) {
5965 FieldAccessCallingConventionARM64 calling_convention;
5966 codegen_->GenerateUnresolvedFieldAccess(instruction,
5967 instruction->GetFieldType(),
5968 instruction->GetFieldIndex(),
5969 instruction->GetDexPc(),
5970 calling_convention);
5971}
5972
Alexandre Rames5319def2014-10-23 10:03:10 +01005973void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005974 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5975 instruction, LocationSummary::kCallOnSlowPath);
Artem Serov7957d952017-04-04 15:44:09 +01005976 // In the suspend check slow path, there are usually no caller-save registers at all.
 5977 // If SIMD instructions are present, however, we force spilling all live SIMD
 5978 // registers in full width (since the runtime only saves/restores the lower part).
5979 locations->SetCustomSlowPathCallerSaves(
5980 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexandre Rames5319def2014-10-23 10:03:10 +01005981}
5982
5983void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005984 HBasicBlock* block = instruction->GetBlock();
5985 if (block->GetLoopInformation() != nullptr) {
5986 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5987 // The back edge will generate the suspend check.
5988 return;
5989 }
5990 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5991 // The goto will generate the suspend check.
5992 return;
5993 }
5994 GenerateSuspendCheck(instruction, nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005995 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005996}
5997
Alexandre Rames67555f72014-11-18 10:55:16 +00005998void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005999 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6000 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00006001 InvokeRuntimeCallingConvention calling_convention;
6002 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
6003}
6004
6005void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00006006 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08006007 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00006008}
6009
6010void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
6011 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006012 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006013 DataType::Type input_type = conversion->GetInputType();
6014 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006015 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
6016 << input_type << " -> " << result_type;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006017 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
6018 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00006019 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6020 }
6021
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006022 if (DataType::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00006023 locations->SetInAt(0, Location::RequiresFpuRegister());
6024 } else {
6025 locations->SetInAt(0, Location::RequiresRegister());
6026 }
6027
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006028 if (DataType::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00006029 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6030 } else {
6031 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6032 }
6033}
6034
6035void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006036 DataType::Type result_type = conversion->GetResultType();
6037 DataType::Type input_type = conversion->GetInputType();
Alexandre Rames67555f72014-11-18 10:55:16 +00006038
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006039 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
6040 << input_type << " -> " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00006041
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006042 if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
6043 int result_size = DataType::Size(result_type);
6044 int input_size = DataType::Size(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00006045 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00006046 Register output = OutputRegister(conversion);
6047 Register source = InputRegisterAt(conversion, 0);
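    // For illustration: a kInt32 -> kInt16 conversion has result_size = 2, input_size = 4 and
    // min_size = 2, so the Sbfx branch below sign-extends the low 16 bits of the source;
    // a kInt32 -> kUint16 (char) conversion takes the Ubfx branch and zero-extends instead.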
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006048 if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01006049 // 'int' values are used directly as W registers, discarding the top
6050 // bits, so we don't need to sign-extend and can just perform a move.
6051 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
6052 // top 32 bits of the target register. We theoretically could leave those
6053 // bits unchanged, but we would have to make sure that no code uses a
 6054 // 32-bit input value as a 64-bit value assuming that the top 32 bits are
6055 // zero.
6056 __ Mov(output.W(), source.W());
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006057 } else if (DataType::IsUnsignedType(result_type) ||
6058 (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
6059 __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00006060 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00006061 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00006062 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006063 } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00006064 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006065 } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
6066 CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
Serban Constantinescu02164b32014-11-13 14:05:07 +00006067 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006068 } else if (DataType::IsFloatingPointType(result_type) &&
6069 DataType::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00006070 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
6071 } else {
6072 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
6073 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00006074 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00006075}
Alexandre Rames67555f72014-11-18 10:55:16 +00006076
Serban Constantinescu02164b32014-11-13 14:05:07 +00006077void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
6078 HandleShift(ushr);
6079}
6080
6081void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
6082 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00006083}
6084
6085void LocationsBuilderARM64::VisitXor(HXor* instruction) {
6086 HandleBinaryOp(instruction);
6087}
6088
6089void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
6090 HandleBinaryOp(instruction);
6091}
6092
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006093void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006094 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006095 LOG(FATAL) << "Unreachable";
6096}
6097
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006098void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006099 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00006100 LOG(FATAL) << "Unreachable";
6101}
6102
Mark Mendellfe57faa2015-09-18 09:26:15 -04006103// Simple implementation of packed switch - generate cascaded compare/jumps.
6104void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6105 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006106 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006107 locations->SetInAt(0, Location::RequiresRegister());
6108}
6109
6110void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6111 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08006112 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006113 Register value_reg = InputRegisterAt(switch_instr, 0);
6114 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6115
Zheng Xu3927c8b2015-11-18 17:46:25 +08006116 // Roughly set 16 as the max average number of assembly instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01006117 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08006118 // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the graph to
6119 // make sure we don't emit it if the target may run out of range.
6120 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
6121 // ranges and emit the tables only as required.
 6122 static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
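  // For illustration, with 4-byte A64 instructions this works out to 1 MB / (16 * 4 B) = 16384 HInstructions.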
Mark Mendellfe57faa2015-09-18 09:26:15 -04006123
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006124 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08006125 // Current instruction id is an upper bound of the number of HIRs in the graph.
6126 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
6127 // Create a series of compare/jumps.
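    // For example, with lower_bound = 10 and num_entries = 4 the emitted sequence is roughly:
    //   subs temp, value, #10 ; b.eq case0
    //   subs temp, temp, #2   ; b.lo case1 ; b.eq case2
    //   cmp  temp, #1         ; b.eq case3
    //   b    default          ; unless the default block is the fall-through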
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006128 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
6129 Register temp = temps.AcquireW();
6130 __ Subs(temp, value_reg, Operand(lower_bound));
6131
Zheng Xu3927c8b2015-11-18 17:46:25 +08006132 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006133 // Jump to successors[0] if value == lower_bound.
6134 __ B(eq, codegen_->GetLabelOf(successors[0]));
6135 int32_t last_index = 0;
6136 for (; num_entries - last_index > 2; last_index += 2) {
6137 __ Subs(temp, temp, Operand(2));
6138 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
6139 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
6140 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
6141 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
6142 }
6143 if (num_entries - last_index == 2) {
6144 // The last missing case_value.
6145 __ Cmp(temp, Operand(1));
6146 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08006147 }
6148
6149 // And the default for any other value.
6150 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6151 __ B(codegen_->GetLabelOf(default_block));
6152 }
6153 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01006154 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08006155
6156 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
6157
 6158 // The instructions below should use at most one blocked register. Since there are two blocked
6159 // registers, we are free to block one.
6160 Register temp_w = temps.AcquireW();
6161 Register index;
6162 // Remove the bias.
6163 if (lower_bound != 0) {
6164 index = temp_w;
6165 __ Sub(index, value_reg, Operand(lower_bound));
6166 } else {
6167 index = value_reg;
6168 }
6169
 6170 // Jump to the default block if the index is out of range.
6171 __ Cmp(index, Operand(num_entries));
6172 __ B(hs, codegen_->GetLabelOf(default_block));
6173
 6174 // In the current VIXL implementation, Adr does not require any blocked registers to encode
 6175 // its immediate value, so we are free to use both VIXL blocked registers to reduce
 6176 // register pressure.
6177 Register table_base = temps.AcquireX();
6178 // Load jump offset from the table.
6179 __ Adr(table_base, jump_table->GetTableStartLabel());
6180 Register jump_offset = temp_w;
6181 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
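    // Each jump table entry is a 32-bit offset relative to table_base, hence the 4-byte scaled
    // (UXTW #2) load above and the sign-extending (SXTW) add below.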
6182
 6183 // Jump to the target block by branching to table_base (PC-relative) + offset.
6184 Register target_address = table_base;
6185 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
6186 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006187 }
6188}
6189
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006190void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
6191 HInstruction* instruction,
6192 Location out,
6193 uint32_t offset,
6194 Location maybe_temp,
6195 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006196 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006197 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006198 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006199 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006200 if (kUseBakerReadBarrier) {
6201 // Load with fast path based Baker's read barrier.
6202 // /* HeapReference<Object> */ out = *(out + offset)
6203 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6204 out,
6205 out_reg,
6206 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006207 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006208 /* needs_null_check */ false,
6209 /* use_load_acquire */ false);
6210 } else {
6211 // Load with slow path based read barrier.
6212 // Save the value of `out` into `maybe_temp` before overwriting it
6213 // in the following move operation, as we will need it for the
6214 // read barrier below.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006215 Register temp_reg = RegisterFrom(maybe_temp, type);
Roland Levillain44015862016-01-22 11:47:17 +00006216 __ Mov(temp_reg, out_reg);
6217 // /* HeapReference<Object> */ out = *(out + offset)
6218 __ Ldr(out_reg, HeapOperand(out_reg, offset));
6219 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6220 }
6221 } else {
6222 // Plain load with no read barrier.
6223 // /* HeapReference<Object> */ out = *(out + offset)
6224 __ Ldr(out_reg, HeapOperand(out_reg, offset));
6225 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
6226 }
6227}
6228
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006229void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
6230 HInstruction* instruction,
6231 Location out,
6232 Location obj,
6233 uint32_t offset,
6234 Location maybe_temp,
6235 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006236 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006237 Register out_reg = RegisterFrom(out, type);
6238 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006239 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006240 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006241 if (kUseBakerReadBarrier) {
6242 // Load with fast path based Baker's read barrier.
Roland Levillain44015862016-01-22 11:47:17 +00006243 // /* HeapReference<Object> */ out = *(obj + offset)
6244 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6245 out,
6246 obj_reg,
6247 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006248 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006249 /* needs_null_check */ false,
6250 /* use_load_acquire */ false);
6251 } else {
6252 // Load with slow path based read barrier.
6253 // /* HeapReference<Object> */ out = *(obj + offset)
6254 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
6255 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6256 }
6257 } else {
6258 // Plain load with no read barrier.
6259 // /* HeapReference<Object> */ out = *(obj + offset)
6260 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
6261 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
6262 }
6263}
6264
Vladimir Markoca1e0382018-04-11 09:58:41 +00006265void CodeGeneratorARM64::GenerateGcRootFieldLoad(
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006266 HInstruction* instruction,
6267 Location root,
6268 Register obj,
6269 uint32_t offset,
6270 vixl::aarch64::Label* fixup_label,
6271 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00006272 DCHECK(fixup_label == nullptr || offset == 0u);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006273 Register root_reg = RegisterFrom(root, DataType::Type::kReference);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006274 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006275 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006276 if (kUseBakerReadBarrier) {
6277 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00006278 // Baker's read barriers are used.
Vladimir Marko450f1d02018-04-25 17:27:45 +01006279 if (kBakerReadBarrierLinkTimeThunksEnableForGcRoots) {
Roland Levillain97c46462017-05-11 14:04:03 +01006280 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
6281 // the Marking Register) to decide whether we need to enter
6282 // the slow path to mark the GC root.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006283 //
Vladimir Marko450f1d02018-04-25 17:27:45 +01006284 // We use shared thunks for the slow path; shared within the method
6285 // for JIT, across methods for AOT. That thunk checks the reference
6286 // and jumps to the entrypoint if needed.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006287 //
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006288 // lr = &return_address;
6289 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
Roland Levillain97c46462017-05-11 14:04:03 +01006290 // if (mr) { // Thread::Current()->GetIsGcMarking()
6291 // goto gc_root_thunk<root_reg>(lr)
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006292 // }
6293 // return_address:
Roland Levillain44015862016-01-22 11:47:17 +00006294
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006295 UseScratchRegisterScope temps(GetVIXLAssembler());
6296 DCHECK(temps.IsAvailable(ip0));
6297 DCHECK(temps.IsAvailable(ip1));
6298 temps.Exclude(ip0, ip1);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006299 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());
Roland Levillainba650a42017-03-06 13:52:32 +00006300
Vladimir Marko450f1d02018-04-25 17:27:45 +01006301 ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006302 vixl::aarch64::Label return_address;
6303 __ adr(lr, &return_address);
6304 if (fixup_label != nullptr) {
Vladimir Marko450f1d02018-04-25 17:27:45 +01006305 __ bind(fixup_label);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006306 }
6307 static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
6308 "GC root LDR must be 2 instruction (8B) before the return address label.");
6309 __ ldr(root_reg, MemOperand(obj.X(), offset));
Vladimir Marko450f1d02018-04-25 17:27:45 +01006310 EmitBakerReadBarrierCbnz(custom_data);
6311 __ bind(&return_address);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006312 } else {
Roland Levillain97c46462017-05-11 14:04:03 +01006313 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
6314 // the Marking Register) to decide whether we need to enter
6315 // the slow path to mark the GC root.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006316 //
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006317 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
Roland Levillain97c46462017-05-11 14:04:03 +01006318 // if (mr) { // Thread::Current()->GetIsGcMarking()
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006319 // // Slow path.
Roland Levillain97c46462017-05-11 14:04:03 +01006320 // entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6321 // root = entrypoint(root); // root = ReadBarrier::Mark(root); // Entry point call.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006322 // }
Roland Levillain44015862016-01-22 11:47:17 +00006323
Roland Levillain97c46462017-05-11 14:04:03 +01006324 // Slow path marking the GC root `root`. The entrypoint will
6325 // be loaded by the slow path code.
6326 SlowPathCodeARM64* slow_path =
Vladimir Markoca1e0382018-04-11 09:58:41 +00006327 new (GetScopedAllocator()) ReadBarrierMarkSlowPathARM64(instruction, root);
6328 AddSlowPath(slow_path);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006329
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006330 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6331 if (fixup_label == nullptr) {
6332 __ Ldr(root_reg, MemOperand(obj, offset));
6333 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006334 EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006335 }
6336 static_assert(
6337 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6338 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6339 "have different sizes.");
6340 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6341 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6342 "have different sizes.");
6343
Roland Levillain97c46462017-05-11 14:04:03 +01006344 __ Cbnz(mr, slow_path->GetEntryLabel());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006345 __ Bind(slow_path->GetExitLabel());
6346 }
Roland Levillain44015862016-01-22 11:47:17 +00006347 } else {
6348 // GC root loaded through a slow path for read barriers other
6349 // than Baker's.
6350 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006351 if (fixup_label == nullptr) {
6352 __ Add(root_reg.X(), obj.X(), offset);
6353 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006354 EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006355 }
Roland Levillain44015862016-01-22 11:47:17 +00006356 // /* mirror::Object* */ root = root->Read()
Vladimir Markoca1e0382018-04-11 09:58:41 +00006357 GenerateReadBarrierForRootSlow(instruction, root, root);
Roland Levillain44015862016-01-22 11:47:17 +00006358 }
6359 } else {
6360 // Plain GC root load with no read barrier.
6361 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006362 if (fixup_label == nullptr) {
6363 __ Ldr(root_reg, MemOperand(obj, offset));
6364 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006365 EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006366 }
Roland Levillain44015862016-01-22 11:47:17 +00006367 // Note that GC roots are not affected by heap poisoning, thus we
6368 // do not have to unpoison `root_reg` here.
6369 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006370 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillain44015862016-01-22 11:47:17 +00006371}
6372
6373void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6374 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006375 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006376 uint32_t offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006377 Location maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006378 bool needs_null_check,
6379 bool use_load_acquire) {
6380 DCHECK(kEmitCompilerReadBarrier);
6381 DCHECK(kUseBakerReadBarrier);
6382
Vladimir Marko450f1d02018-04-25 17:27:45 +01006383 if (kBakerReadBarrierLinkTimeThunksEnableForFields && !use_load_acquire) {
Roland Levillain97c46462017-05-11 14:04:03 +01006384 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6385 // Marking Register) to decide whether we need to enter the slow
6386 // path to mark the reference. Then, in the slow path, check the
6387 // gray bit in the lock word of the reference's holder (`obj`) to
6388 // decide whether to mark `ref` or not.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006389 //
Vladimir Marko450f1d02018-04-25 17:27:45 +01006390 // We use shared thunks for the slow path; shared within the method
6391 // for JIT, across methods for AOT. That thunk checks the holder
6392 // and jumps to the entrypoint if needed. If the holder is not gray,
6393 // it creates a fake dependency and returns to the LDR instruction.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006394 //
Vladimir Marko66d691d2017-04-07 17:53:39 +01006395 // lr = &gray_return_address;
Roland Levillain97c46462017-05-11 14:04:03 +01006396 // if (mr) { // Thread::Current()->GetIsGcMarking()
6397 // goto field_thunk<holder_reg, base_reg>(lr)
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006398 // }
6399 // not_gray_return_address:
6400 // // Original reference load. If the offset is too large to fit
6401 // // into LDR, we use an adjusted base register here.
Vladimir Marko88abba22017-05-03 17:09:25 +01006402 // HeapReference<mirror::Object> reference = *(obj+offset);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006403 // gray_return_address:
6404
6405 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
6406 Register base = obj;
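    // Illustrative example with a hypothetical threshold: if kReferenceLoadMinFarOffset were 4096,
    // an offset of 4100 would be split into base = obj + 4096 and a residual LDR offset of 4,
    // keeping the residual offset small enough to encode directly in the LDR.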
6407 if (offset >= kReferenceLoadMinFarOffset) {
6408 DCHECK(maybe_temp.IsRegister());
6409 base = WRegisterFrom(maybe_temp);
6410 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
6411 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
6412 offset &= (kReferenceLoadMinFarOffset - 1u);
6413 }
6414 UseScratchRegisterScope temps(GetVIXLAssembler());
6415 DCHECK(temps.IsAvailable(ip0));
6416 DCHECK(temps.IsAvailable(ip1));
6417 temps.Exclude(ip0, ip1);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006418 uint32_t custom_data = EncodeBakerReadBarrierFieldData(base.GetCode(), obj.GetCode());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006419
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006420 {
Vladimir Marko450f1d02018-04-25 17:27:45 +01006421 ExactAssemblyScope guard(GetVIXLAssembler(),
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006422 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6423 vixl::aarch64::Label return_address;
6424 __ adr(lr, &return_address);
Vladimir Marko450f1d02018-04-25 17:27:45 +01006425 EmitBakerReadBarrierCbnz(custom_data);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006426 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6427 "Field LDR must be 1 instruction (4B) before the return address label; "
6428 " 2 instructions (8B) for heap poisoning.");
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006429 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006430 __ ldr(ref_reg, MemOperand(base.X(), offset));
6431 if (needs_null_check) {
6432 MaybeRecordImplicitNullCheck(instruction);
6433 }
6434 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Vladimir Marko450f1d02018-04-25 17:27:45 +01006435 __ bind(&return_address);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006436 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006437 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006438 return;
6439 }
6440
Roland Levillain44015862016-01-22 11:47:17 +00006441 // /* HeapReference<Object> */ ref = *(obj + offset)
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006442 Register temp = WRegisterFrom(maybe_temp);
Roland Levillain44015862016-01-22 11:47:17 +00006443 Location no_index = Location::NoLocation();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006444 size_t no_scale_factor = 0u;
Roland Levillainbfea3352016-06-23 13:48:47 +01006445 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6446 ref,
6447 obj,
6448 offset,
6449 no_index,
6450 no_scale_factor,
6451 temp,
6452 needs_null_check,
6453 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006454}
6455
6456void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6457 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006458 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006459 uint32_t data_offset,
6460 Location index,
6461 Register temp,
6462 bool needs_null_check) {
6463 DCHECK(kEmitCompilerReadBarrier);
6464 DCHECK(kUseBakerReadBarrier);
6465
Vladimir Marko66d691d2017-04-07 17:53:39 +01006466 static_assert(
6467 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6468 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006469 size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006470
Vladimir Marko450f1d02018-04-25 17:27:45 +01006471 if (kBakerReadBarrierLinkTimeThunksEnableForArrays) {
Roland Levillain97c46462017-05-11 14:04:03 +01006472 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6473 // Marking Register) to decide whether we need to enter the slow
6474 // path to mark the reference. Then, in the slow path, check the
6475 // gray bit in the lock word of the reference's holder (`obj`) to
6476 // decide whether to mark `ref` or not.
Vladimir Marko66d691d2017-04-07 17:53:39 +01006477 //
Vladimir Marko450f1d02018-04-25 17:27:45 +01006478 // We use shared thunks for the slow path; shared within the method
6479 // for JIT, across methods for AOT. That thunk checks the holder
6480 // and jumps to the entrypoint if needed. If the holder is not gray,
6481 // it creates a fake dependency and returns to the LDR instruction.
Vladimir Marko66d691d2017-04-07 17:53:39 +01006482 //
Vladimir Marko66d691d2017-04-07 17:53:39 +01006483 // lr = &gray_return_address;
Roland Levillain97c46462017-05-11 14:04:03 +01006484 // if (mr) { // Thread::Current()->GetIsGcMarking()
6485 // goto array_thunk<base_reg>(lr)
Vladimir Marko66d691d2017-04-07 17:53:39 +01006486 // }
6487 // not_gray_return_address:
6488 // // Original reference load. If the offset is too large to fit
6489 // // into LDR, we use an adjusted base register here.
Vladimir Marko88abba22017-05-03 17:09:25 +01006490 // HeapReference<mirror::Object> reference = data[index];
Vladimir Marko66d691d2017-04-07 17:53:39 +01006491 // gray_return_address:
6492
6493 DCHECK(index.IsValid());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006494 Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
6495 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006496
6497 UseScratchRegisterScope temps(GetVIXLAssembler());
6498 DCHECK(temps.IsAvailable(ip0));
6499 DCHECK(temps.IsAvailable(ip1));
6500 temps.Exclude(ip0, ip1);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006501 uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());
Vladimir Marko66d691d2017-04-07 17:53:39 +01006502
Vladimir Marko66d691d2017-04-07 17:53:39 +01006503 __ Add(temp.X(), obj.X(), Operand(data_offset));
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006504 {
Vladimir Marko450f1d02018-04-25 17:27:45 +01006505 ExactAssemblyScope guard(GetVIXLAssembler(),
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006506 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6507 vixl::aarch64::Label return_address;
6508 __ adr(lr, &return_address);
Vladimir Marko450f1d02018-04-25 17:27:45 +01006509 EmitBakerReadBarrierCbnz(custom_data);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006510 static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6511 "Array LDR must be 1 instruction (4B) before the return address label; "
6512 " 2 instructions (8B) for heap poisoning.");
6513 __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
6514 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
6515 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Vladimir Marko450f1d02018-04-25 17:27:45 +01006516 __ bind(&return_address);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006517 }
6518 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
Vladimir Marko66d691d2017-04-07 17:53:39 +01006519 return;
6520 }
6521
Roland Levillain44015862016-01-22 11:47:17 +00006522 // Array cells are never volatile variables, therefore array loads
6523 // never use Load-Acquire instructions on ARM64.
6524 const bool use_load_acquire = false;
6525
6526 // /* HeapReference<Object> */ ref =
6527 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01006528 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6529 ref,
6530 obj,
6531 data_offset,
6532 index,
6533 scale_factor,
6534 temp,
6535 needs_null_check,
6536 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006537}
6538
6539void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6540 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006541 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006542 uint32_t offset,
6543 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01006544 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00006545 Register temp,
6546 bool needs_null_check,
Roland Levillainff487002017-03-07 16:50:01 +00006547 bool use_load_acquire) {
Roland Levillain44015862016-01-22 11:47:17 +00006548 DCHECK(kEmitCompilerReadBarrier);
6549 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01006550 // If we are emitting an array load, we should not be using a
6551 // Load Acquire instruction. In other words:
6552 // `instruction->IsArrayGet()` => `!use_load_acquire`.
6553 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006554
Roland Levillain97c46462017-05-11 14:04:03 +01006555 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6556 // Marking Register) to decide whether we need to enter the slow
6557 // path to mark the reference. Then, in the slow path, check the
6558 // gray bit in the lock word of the reference's holder (`obj`) to
6559 // decide whether to mark `ref` or not.
Roland Levillain44015862016-01-22 11:47:17 +00006560 //
Roland Levillain97c46462017-05-11 14:04:03 +01006561 // if (mr) { // Thread::Current()->GetIsGcMarking()
Roland Levillainba650a42017-03-06 13:52:32 +00006562 // // Slow path.
Roland Levillain54f869e2017-03-06 13:54:11 +00006563 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6564 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6565 // HeapReference<mirror::Object> ref = *src; // Original reference load.
6566 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6567 // if (is_gray) {
Roland Levillain97c46462017-05-11 14:04:03 +01006568 // entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6569 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
Roland Levillain54f869e2017-03-06 13:54:11 +00006570 // }
6571 // } else {
6572 // HeapReference<mirror::Object> ref = *src; // Original reference load.
Roland Levillain44015862016-01-22 11:47:17 +00006573 // }
Roland Levillain44015862016-01-22 11:47:17 +00006574
Roland Levillainba650a42017-03-06 13:52:32 +00006575 // Slow path marking the object `ref` when the GC is marking. The
Roland Levillain97c46462017-05-11 14:04:03 +01006576 // entrypoint will be loaded by the slow path code.
Roland Levillainff487002017-03-07 16:50:01 +00006577 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006578 new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierSlowPathARM64(
Roland Levillainff487002017-03-07 16:50:01 +00006579 instruction,
6580 ref,
6581 obj,
6582 offset,
6583 index,
6584 scale_factor,
6585 needs_null_check,
6586 use_load_acquire,
Roland Levillain97c46462017-05-11 14:04:03 +01006587 temp);
Roland Levillainba650a42017-03-06 13:52:32 +00006588 AddSlowPath(slow_path);
6589
Roland Levillain97c46462017-05-11 14:04:03 +01006590 __ Cbnz(mr, slow_path->GetEntryLabel());
Roland Levillainff487002017-03-07 16:50:01 +00006591 // Fast path: the GC is not marking: just load the reference.
Roland Levillain54f869e2017-03-06 13:54:11 +00006592 GenerateRawReferenceLoad(
6593 instruction, ref, obj, offset, index, scale_factor, needs_null_check, use_load_acquire);
Roland Levillainba650a42017-03-06 13:52:32 +00006594 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006595 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillainba650a42017-03-06 13:52:32 +00006596}
6597
Roland Levillainff487002017-03-07 16:50:01 +00006598void CodeGeneratorARM64::UpdateReferenceFieldWithBakerReadBarrier(HInstruction* instruction,
6599 Location ref,
6600 Register obj,
6601 Location field_offset,
6602 Register temp,
6603 bool needs_null_check,
6604 bool use_load_acquire) {
6605 DCHECK(kEmitCompilerReadBarrier);
6606 DCHECK(kUseBakerReadBarrier);
6607 // If we are emitting an array load, we should not be using a
6608 // Load Acquire instruction. In other words:
6609 // `instruction->IsArrayGet()` => `!use_load_acquire`.
6610 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
6611
Roland Levillain97c46462017-05-11 14:04:03 +01006612 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6613 // Marking Register) to decide whether we need to enter the slow
6614 // path to update the reference field within `obj`. Then, in the
6615 // slow path, check the gray bit in the lock word of the reference's
6616 // holder (`obj`) to decide whether to mark `ref` and update the
6617 // field or not.
Roland Levillainff487002017-03-07 16:50:01 +00006618 //
Roland Levillain97c46462017-05-11 14:04:03 +01006619 // if (mr) { // Thread::Current()->GetIsGcMarking()
Roland Levillainff487002017-03-07 16:50:01 +00006620 // // Slow path.
6621 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6622 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6623 // HeapReference<mirror::Object> ref = *(obj + field_offset); // Reference load.
6624 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6625 // if (is_gray) {
6626 // old_ref = ref;
Roland Levillain97c46462017-05-11 14:04:03 +01006627 // entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6628 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
Roland Levillainff487002017-03-07 16:50:01 +00006629 // compareAndSwapObject(obj, field_offset, old_ref, ref);
6630 // }
6631 // }
6632
6633 // Slow path updating the object reference at address `obj + field_offset`
Roland Levillain97c46462017-05-11 14:04:03 +01006634 // when the GC is marking. The entrypoint will be loaded by the slow path code.
Roland Levillainff487002017-03-07 16:50:01 +00006635 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006636 new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
Roland Levillainff487002017-03-07 16:50:01 +00006637 instruction,
6638 ref,
6639 obj,
6640 /* offset */ 0u,
6641 /* index */ field_offset,
6642 /* scale_factor */ 0u /* "times 1" */,
6643 needs_null_check,
6644 use_load_acquire,
Roland Levillain97c46462017-05-11 14:04:03 +01006645 temp);
Roland Levillainff487002017-03-07 16:50:01 +00006646 AddSlowPath(slow_path);
6647
Roland Levillain97c46462017-05-11 14:04:03 +01006648 __ Cbnz(mr, slow_path->GetEntryLabel());
Roland Levillainff487002017-03-07 16:50:01 +00006649 // Fast path: the GC is not marking: nothing to do (the field is
6650 // up-to-date, and we don't need to load the reference).
6651 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006652 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillainff487002017-03-07 16:50:01 +00006653}
6654
Roland Levillainba650a42017-03-06 13:52:32 +00006655void CodeGeneratorARM64::GenerateRawReferenceLoad(HInstruction* instruction,
6656 Location ref,
6657 Register obj,
6658 uint32_t offset,
6659 Location index,
6660 size_t scale_factor,
6661 bool needs_null_check,
6662 bool use_load_acquire) {
6663 DCHECK(obj.IsW());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006664 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006665 Register ref_reg = RegisterFrom(ref, type);
Roland Levillain44015862016-01-22 11:47:17 +00006666
Roland Levillainba650a42017-03-06 13:52:32 +00006667 // If needed, vixl::EmissionCheckScope guards are used to ensure
6668 // that no pools are emitted between the load (macro) instruction
6669 // and MaybeRecordImplicitNullCheck.
Roland Levillain44015862016-01-22 11:47:17 +00006670
Roland Levillain44015862016-01-22 11:47:17 +00006671 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006672 // Load types involving an "index": ArrayGet,
6673 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6674 // intrinsics.
Roland Levillainbfea3352016-06-23 13:48:47 +01006675 if (use_load_acquire) {
6676 // UnsafeGetObjectVolatile intrinsic case.
6677 // Register `index` is not an index in an object array, but an
6678 // offset to an object reference field within object `obj`.
6679 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
6680 DCHECK(instruction->GetLocations()->Intrinsified());
6681 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
6682 << instruction->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006683 DCHECK_EQ(offset, 0u);
6684 DCHECK_EQ(scale_factor, 0u);
Roland Levillainba650a42017-03-06 13:52:32 +00006685 DCHECK_EQ(needs_null_check, false);
6686 // /* HeapReference<mirror::Object> */ ref = *(obj + index)
Roland Levillainbfea3352016-06-23 13:48:47 +01006687 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
6688 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00006689 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006690 // ArrayGet and UnsafeGetObject and UnsafeCASObject intrinsics cases.
6691 // /* HeapReference<mirror::Object> */ ref = *(obj + offset + (index << scale_factor))
Roland Levillainbfea3352016-06-23 13:48:47 +01006692 if (index.IsConstant()) {
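        // For illustration (hypothetical values): offset = 16, constant index = 3, and
        // scale_factor = 2 for 4-byte heap references give computed_offset = 16 + (3 << 2) = 28.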
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01006693 uint32_t computed_offset = offset + (Int64FromLocation(index) << scale_factor);
Roland Levillainba650a42017-03-06 13:52:32 +00006694 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillainbfea3352016-06-23 13:48:47 +01006695 Load(type, ref_reg, HeapOperand(obj, computed_offset));
Roland Levillainba650a42017-03-06 13:52:32 +00006696 if (needs_null_check) {
6697 MaybeRecordImplicitNullCheck(instruction);
6698 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006699 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006700 UseScratchRegisterScope temps(GetVIXLAssembler());
6701 Register temp = temps.AcquireW();
6702 __ Add(temp, obj, offset);
6703 {
6704 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
6705 Load(type, ref_reg, HeapOperand(temp, XRegisterFrom(index), LSL, scale_factor));
6706 if (needs_null_check) {
6707 MaybeRecordImplicitNullCheck(instruction);
6708 }
6709 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006710 }
Roland Levillain44015862016-01-22 11:47:17 +00006711 }
Roland Levillain44015862016-01-22 11:47:17 +00006712 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006713 // /* HeapReference<mirror::Object> */ ref = *(obj + offset)
Roland Levillain44015862016-01-22 11:47:17 +00006714 MemOperand field = HeapOperand(obj, offset);
6715 if (use_load_acquire) {
Roland Levillainba650a42017-03-06 13:52:32 +00006716 // Implicit null checks are handled by CodeGeneratorARM64::LoadAcquire.
6717 LoadAcquire(instruction, ref_reg, field, needs_null_check);
Roland Levillain44015862016-01-22 11:47:17 +00006718 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006719 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain44015862016-01-22 11:47:17 +00006720 Load(type, ref_reg, field);
Roland Levillainba650a42017-03-06 13:52:32 +00006721 if (needs_null_check) {
6722 MaybeRecordImplicitNullCheck(instruction);
6723 }
Roland Levillain44015862016-01-22 11:47:17 +00006724 }
6725 }
6726
6727 // Object* ref = ref_addr->AsMirrorPtr()
6728 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Roland Levillain44015862016-01-22 11:47:17 +00006729}
6730
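// Debug-build consistency check for Baker read barriers: the dedicated marking register is
// expected to mirror Thread::Current()->GetIsGcMarking(), and the emitted check (see
// Arm64Assembler::GenerateMarkingRegisterCheck) traps if the two disagree; `code` is a small
// constant used to identify the failing call site.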
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006731void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
6732 // The following condition is a compile-time one, so it does not have a run-time cost.
6733 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
6734 // The following condition is a run-time one; it is executed after the
6735 // previous compile-time test, to avoid penalizing non-debug builds.
6736 if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
6737 UseScratchRegisterScope temps(GetVIXLAssembler());
6738 Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
6739 GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
6740 }
6741 }
6742}
6743
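// Slow-path (non-Baker) read barrier to be emitted after a heap reference load: the slow path
// hands the just-loaded reference to the runtime (the artReadBarrierSlow entry point mentioned
// below) and receives the to-space reference back; conceptually
//   ref = ReadBarrier::Barrier<mirror::Object>(obj, offset [+ index], ref).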
Roland Levillain44015862016-01-22 11:47:17 +00006744void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
6745 Location out,
6746 Location ref,
6747 Location obj,
6748 uint32_t offset,
6749 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006750 DCHECK(kEmitCompilerReadBarrier);
6751
Roland Levillain44015862016-01-22 11:47:17 +00006752 // Insert a slow path based read barrier *after* the reference load.
6753 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006754 // If heap poisoning is enabled, the unpoisoning of the loaded
6755 // reference will be carried out by the runtime within the slow
6756 // path.
6757 //
6758 // Note that `ref` currently does not get unpoisoned (when heap
6759 // poisoning is enabled), which is alright as the `ref` argument is
6760 // not used by the artReadBarrierSlow entry point.
6761 //
6762 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006763 SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006764 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
6765 AddSlowPath(slow_path);
6766
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006767 __ B(slow_path->GetEntryLabel());
6768 __ Bind(slow_path->GetExitLabel());
6769}
6770
Roland Levillain44015862016-01-22 11:47:17 +00006771void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6772 Location out,
6773 Location ref,
6774 Location obj,
6775 uint32_t offset,
6776 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006777 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00006778 // Baker's read barriers shall be handled by the fast path
6779 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
6780 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006781 // If heap poisoning is enabled, unpoisoning will be taken care of
6782 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00006783 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006784 } else if (kPoisonHeapReferences) {
6785 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
6786 }
6787}
6788
Roland Levillain44015862016-01-22 11:47:17 +00006789void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6790 Location out,
6791 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006792 DCHECK(kEmitCompilerReadBarrier);
6793
Roland Levillain44015862016-01-22 11:47:17 +00006794 // Insert a slow path based read barrier *after* the GC root load.
6795 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006796 // Note that GC roots are not affected by heap poisoning, so we do
 6797  // not need to do anything special here.
6798 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006799 new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006800 AddSlowPath(slow_path);
6801
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006802 __ B(slow_path->GetEntryLabel());
6803 __ Bind(slow_path->GetExitLabel());
6804}
6805
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006806void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
6807 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006808 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006809 locations->SetInAt(0, Location::RequiresRegister());
6810 locations->SetOut(Location::RequiresRegister());
6811}
6812
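// HClassTableGet reads a method entry out of a class: the vtable case needs a single load at
// the embedded vtable entry offset, while the IMT case first loads the ImTable pointer from
// the class and then indexes into it, hence the two loads in that branch.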
6813void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
6814 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00006815 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006816 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006817 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006818 __ Ldr(XRegisterFrom(locations->Out()),
6819 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006820 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006821 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00006822 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006823 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
6824 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006825 __ Ldr(XRegisterFrom(locations->Out()),
6826 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006827 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006828}
6829
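// Patches a 32-bit literal in JIT-compiled code so that it holds the address of its slot in
// the JIT root table. Illustrative numbers only: with roots_data at 0x70000000 and
// index_in_table == 2, the literal becomes 0x70000000 + 2 * sizeof(GcRoot<mirror::Object>),
// i.e. 0x70000008 (GcRoot holds a 32-bit compressed reference).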
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006830static void PatchJitRootUse(uint8_t* code,
6831 const uint8_t* roots_data,
6832 vixl::aarch64::Literal<uint32_t>* literal,
6833 uint64_t index_in_table) {
6834 uint32_t literal_offset = literal->GetOffset();
6835 uintptr_t address =
6836 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
6837 uint8_t* data = code + literal_offset;
6838 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
6839}
6840
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006841void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
6842 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006843 const StringReference& string_reference = entry.first;
6844 vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01006845 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006846 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006847 }
6848 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006849 const TypeReference& type_reference = entry.first;
6850 vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01006851 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006852 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006853 }
6854}
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006855
Alexandre Rames67555f72014-11-18 10:55:16 +00006856#undef __
6857#undef QUICK_ENTRY_POINT
6858
Vladimir Markoca1e0382018-04-11 09:58:41 +00006859#define __ assembler.GetVIXLAssembler()->
6860
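// Common fast path of the Baker read barrier thunks emitted below. Sketch of the entry
// convention: the instrumented code reaches a thunk with LR pointing at a return-address label
// placed right after the original LDR (plus the unpoisoning instruction when heap poisoning is
// enabled), and with ip0/ip1 free for the thunk to clobber; the thunk can therefore rewind LR
// and re-execute the LDR (fast path) or decode the LDR through LR (slow path).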
6861static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
6862 vixl::aarch64::Register base_reg,
6863 vixl::aarch64::MemOperand& lock_word,
Vladimir Marko7a695052018-04-12 10:26:50 +01006864 vixl::aarch64::Label* slow_path,
6865 vixl::aarch64::Label* throw_npe = nullptr) {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006866 // Load the lock word containing the rb_state.
6867 __ Ldr(ip0.W(), lock_word);
6868 // Given the numeric representation, it's enough to check the low bit of the rb_state.
6869 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
6870 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
6871 __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
6872 static_assert(
6873 BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
6874 "Field and array LDR offsets must be the same to reuse the same code.");
Vladimir Marko7a695052018-04-12 10:26:50 +01006875  // To throw NPE, we return to the fast path: re-executing the LDR with a null base triggers
  // the implicit null check fault. (The artificial dependence below does not matter here.)
6876 if (throw_npe != nullptr) {
6877 __ Bind(throw_npe);
6878 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006879 // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
6880 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6881 "Field LDR must be 1 instruction (4B) before the return address label; "
6882 " 2 instructions (8B) for heap poisoning.");
6883 __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
6884 // Introduce a dependency on the lock_word including rb_state,
6885 // to prevent load-load reordering, and without using
6886 // a memory barrier (which would be more expensive).
6887 __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
6888 __ Br(lr); // And return back to the function.
6889 // Note: The fake dependency is unnecessary for the slow path.
6890}
6891
6892// Load the read barrier introspection entrypoint in register `entrypoint`.
6893static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
6894 vixl::aarch64::Register entrypoint) {
6895 // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
6896 DCHECK_EQ(ip0.GetCode(), 16u);
6897 const int32_t entry_point_offset =
6898 Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
6899 __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
6900}
6901
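// `encoded_data` packs the thunk kind together with one or two register numbers (see the
// BakerReadBarrierKindField / *FirstRegField / *SecondRegField decodings below); one thunk is
// compiled per distinct encoding and shared by all call sites that use that encoding.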
6902void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
6903 uint32_t encoded_data,
6904 /*out*/ std::string* debug_name) {
6905 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
6906 switch (kind) {
6907 case BakerReadBarrierKind::kField: {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006908 auto base_reg =
6909 Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6910 CheckValidReg(base_reg.GetCode());
6911 auto holder_reg =
6912 Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
6913 CheckValidReg(holder_reg.GetCode());
6914 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6915 temps.Exclude(ip0, ip1);
Vladimir Marko7a695052018-04-12 10:26:50 +01006916 // If base_reg differs from holder_reg, the offset was too large and we must have emitted
6917 // an explicit null check before the load. Otherwise, for implicit null checks, we need to
6918 // null-check the holder as we do not necessarily do that check before going to the thunk.
6919 vixl::aarch64::Label throw_npe_label;
6920 vixl::aarch64::Label* throw_npe = nullptr;
6921 if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
6922 throw_npe = &throw_npe_label;
6923 __ Cbz(holder_reg.W(), throw_npe);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006924 }
Vladimir Marko7a695052018-04-12 10:26:50 +01006925      // Check if the holder is gray and, if not, add a fake dependency to the base register
6926 // and return to the LDR instruction to load the reference. Otherwise, use introspection
6927 // to load the reference and call the entrypoint that performs further checks on the
6928 // reference and marks it if needed.
Vladimir Markoca1e0382018-04-11 09:58:41 +00006929 vixl::aarch64::Label slow_path;
6930 MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
Vladimir Marko7a695052018-04-12 10:26:50 +01006931 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006932 __ Bind(&slow_path);
6933 MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
6934 __ Ldr(ip0.W(), ldr_address); // Load the LDR (immediate) unsigned offset.
6935 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
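      // For an LDR (immediate, unsigned offset), imm12 sits in bits [21:10] and is scaled by
      // the access size; for this 32-bit load the byte offset is imm12 << 2, which is why the
      // extracted field is used below with LSL #2 (e.g. an original `ldr w1, [x2, #8]` encodes
      // imm12 == 2).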
6936 __ Ubfx(ip0.W(), ip0.W(), 10, 12); // Extract the offset.
6937 __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2)); // Load the reference.
6938 // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
6939 __ Br(ip1); // Jump to the entrypoint.
Vladimir Markoca1e0382018-04-11 09:58:41 +00006940 break;
6941 }
6942 case BakerReadBarrierKind::kArray: {
6943 auto base_reg =
6944 Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6945 CheckValidReg(base_reg.GetCode());
6946 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6947 BakerReadBarrierSecondRegField::Decode(encoded_data));
6948 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6949 temps.Exclude(ip0, ip1);
6950 vixl::aarch64::Label slow_path;
6951 int32_t data_offset =
6952 mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
6953 MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
6954 DCHECK_LT(lock_word.GetOffset(), 0);
6955 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
6956 __ Bind(&slow_path);
6957 MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
6958 __ Ldr(ip0.W(), ldr_address); // Load the LDR (register) unsigned offset.
6959 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6960 __ Ubfx(ip0, ip0, 16, 6); // Extract the index register, plus 32 (bit 21 is set).
 6961      __ Bfi(ip1, ip0, 3, 6);           // Insert ip0 into the entrypoint address to create
6962 // a switch case target based on the index register.
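      // The Bfi replaces bits [8:3] of the entrypoint address with the 6-bit value extracted
      // above (index register number plus 32), i.e. it selects one of the 8-byte-spaced
      // per-register cases inside the introspection entrypoint, which is laid out and aligned
      // so that this bitfield insertion yields the correct case address.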
6963 __ Mov(ip0, base_reg); // Move the base register to ip0.
6964 __ Br(ip1); // Jump to the entrypoint's array switch case.
6965 break;
6966 }
6967 case BakerReadBarrierKind::kGcRoot: {
6968 // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
6969 // and it does not have a forwarding address), call the correct introspection entrypoint;
6970 // otherwise return the reference (or the extracted forwarding address).
6971 // There is no gray bit check for GC roots.
6972 auto root_reg =
6973 Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6974 CheckValidReg(root_reg.GetCode());
6975 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6976 BakerReadBarrierSecondRegField::Decode(encoded_data));
6977 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6978 temps.Exclude(ip0, ip1);
6979 vixl::aarch64::Label return_label, not_marked, forwarding_address;
6980 __ Cbz(root_reg, &return_label);
6981 MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
6982 __ Ldr(ip0.W(), lock_word);
6983 __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
6984 __ Bind(&return_label);
6985 __ Br(lr);
6986 __ Bind(&not_marked);
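      // The lock word keeps its state in the two most significant bits, and the
      // forwarding-address state has both of them set. `tst ip0, ip0, lsl #1` makes the N flag
      // equal to (bit 31 AND bit 30) of the lock word, so the `mi` branch below is taken
      // exactly for forwarding addresses.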
6987 __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
6988 __ B(&forwarding_address, mi);
6989 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6990 // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
6991 // art_quick_read_barrier_mark_introspection_gc_roots.
6992 __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
6993 __ Mov(ip0.W(), root_reg);
6994 __ Br(ip1);
6995 __ Bind(&forwarding_address);
6996 __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
6997 __ Br(lr);
6998 break;
6999 }
7000 default:
7001 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
7002 UNREACHABLE();
7003 }
7004
Vladimir Marko450f1d02018-04-25 17:27:45 +01007005 // For JIT, the slow path is considered part of the compiled method,
7006 // so JIT should pass null as `debug_name`. Tests may not have a runtime.
7007 DCHECK(Runtime::Current() == nullptr ||
7008 !Runtime::Current()->UseJitCompilation() ||
7009 debug_name == nullptr);
7010 if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
Vladimir Markoca1e0382018-04-11 09:58:41 +00007011 std::ostringstream oss;
7012 oss << "BakerReadBarrierThunk";
7013 switch (kind) {
7014 case BakerReadBarrierKind::kField:
7015 oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
7016 << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
7017 break;
7018 case BakerReadBarrierKind::kArray:
7019 oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
7020 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
7021 BakerReadBarrierSecondRegField::Decode(encoded_data));
7022 break;
7023 case BakerReadBarrierKind::kGcRoot:
7024 oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
7025 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
7026 BakerReadBarrierSecondRegField::Decode(encoded_data));
7027 break;
7028 }
7029 *debug_name = oss.str();
7030 }
7031}
7032
7033#undef __
7034
Alexandre Rames5319def2014-10-23 10:03:10 +01007035} // namespace arm64
7036} // namespace art