/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence generates less code/data with a small num_entries.
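// (Illustrative arithmetic, not part of the original comment: at the threshold of 7 entries the
// compare/jump sequence costs roughly 1.5 * 7 + 3 ~= 14 instructions, while the jump table costs
// 7 instructions plus 7 * 4 bytes of literals, so the two forms are of comparable size there.)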
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// A reference load (except for object array loads) uses LDR Wt, [Xn, #offset], which can handle
// offsets < 16KiB. For offsets >= 16KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
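// (Hypothetical example for illustration: with kReferenceLoadMinFarOffset == 16KiB, a reference
// field at offset 0x4100 would be accessed roughly as ADD temp, obj, #0x4000 followed by
// LDR wN, [temp, #0x100], keeping each immediate within the encodable range.)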
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;

// Flags controlling the use of link-time generated thunks for Baker read barriers.
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForFields = true;
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierLinkTimeThunksEnableForGcRoots = true;

// Some instructions have special requirements for a temporary, for example
// LoadClass/kBssEntry and LoadString/kBssEntry for Baker read barrier require
// a temp that's not R0 (to avoid an extra move) and Baker read barrier field
// loads with large offsets need a fixed register to limit the number of link-time
// thunks we generate. For these and similar cases, we want to reserve a specific
// register that's neither callee-save nor an argument register. We choose x15.
inline Location FixedTempLocation() {
  return Location::RegisterLocation(x15.GetCode());
}

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
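  // (Clarifying note, assuming VIXL's convention for this helper: IsImmLSPair() below takes the
  // access size as log2(bytes), so WhichPowerOf2(kXRegSizeInBytes) yields 3 for 8-byte registers.)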
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the base address of the floating-point register spills).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit)
      : SlowPathCodeARM64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and that we have generated a jump table of the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Abstract base class for read barrier slow paths marking a reference
// `ref`.
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class ReadBarrierMarkSlowPathBaseARM64 : public SlowPathCodeARM64 {
 protected:
  ReadBarrierMarkSlowPathBaseARM64(HInstruction* instruction, Location ref, Location entrypoint)
      : SlowPathCodeARM64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathBaseARM64"; }

  // Generate assembly code calling the read barrier marking runtime
  // entry point (ReadBarrierMarkRegX).
  void GenerateReadBarrierMarkRuntimeCall(CodeGenerator* codegen) {
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
  }

  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathBaseARM64);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking.
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class ReadBarrierMarkSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction,
                               Location ref,
                               Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);
    __ B(GetExitLabel());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). The field `obj.field` in the object `obj` holding
// this reference does not get updated by this slow path after marking
// (see LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
// below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class LoadReferenceWithBakerReadBarrierSlowPathARM64 : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierSlowPathARM64(HInstruction* instruction,
                                                 Location ref,
                                                 Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 size_t scale_factor,
                                                 bool needs_null_check,
                                                 bool use_load_acquire,
                                                 Register temp,
                                                 Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // When using MaybeGenerateReadBarrierSlow, the read barrier call is
    // inserted after the original load. However, in fast path based
    // Baker's read barriers, we need to perform the load of
    // mirror::Object::monitor_ *before* the original reference load.
    // This load-load ordering is required by the read barrier.
    // The slow path (for Baker's algorithm) should look like:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //   }
    //
    // Note: the original implementation in ReadBarrier::Barrier is
    // slightly more complex as it performs additional checks that we do
    // not do here for performance reasons.

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
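    // (Clarifying note, not part of the original comment: the 32-bit load above zero-extends
    // into the X view of `temp_`, so `temp_.X() >> 32` is zero and the ADD below leaves `obj_`
    // numerically unchanged while still creating the desired data dependency.)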
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());
    GenerateReadBarrierMarkRuntimeCall(codegen);

    __ B(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  Register obj_;
  // The offset, index and scale factor to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  size_t scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // Should this reference load use Load-Acquire semantics?
  bool use_load_acquire_;
  // A temporary register used to hold the lock word of `obj_`.
  Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierSlowPathARM64);
};

Roland Levillain54f869e2017-03-06 13:54:11 +0000878// Slow path loading `obj`'s lock word, loading a reference from
879// object `*(obj + offset + (index << scale_factor))` into `ref`, and
880// marking `ref` if `obj` is gray according to the lock word (Baker
881// read barrier). If needed, this slow path also atomically updates
882// the field `obj.field` in the object `obj` holding this reference
883// after marking (contrary to
884// LoadReferenceWithBakerReadBarrierSlowPathARM64 above, which never
885// tries to update `obj.field`).
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100886//
887// This means that after the execution of this slow path, both `ref`
888// and `obj.field` will be up-to-date; i.e., after the flip, both will
889// hold the same to-space reference (unless another thread installed
890// another object reference (different from `ref`) in `obj.field`).
Roland Levillainba650a42017-03-06 13:52:32 +0000891//
Roland Levillain54f869e2017-03-06 13:54:11 +0000892// Argument `entrypoint` must be a register location holding the read
Roland Levillain97c46462017-05-11 14:04:03 +0100893// barrier marking runtime entry point to be invoked or an empty
894// location; in the latter case, the read barrier marking runtime
895// entry point will be loaded by the slow path code itself.
Roland Levillain54f869e2017-03-06 13:54:11 +0000896class LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
897 : public ReadBarrierMarkSlowPathBaseARM64 {
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100898 public:
Roland Levillain97c46462017-05-11 14:04:03 +0100899 LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
900 HInstruction* instruction,
901 Location ref,
902 Register obj,
903 uint32_t offset,
904 Location index,
905 size_t scale_factor,
906 bool needs_null_check,
907 bool use_load_acquire,
908 Register temp,
909 Location entrypoint = Location::NoLocation())
Roland Levillain54f869e2017-03-06 13:54:11 +0000910 : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100911 obj_(obj),
Roland Levillain54f869e2017-03-06 13:54:11 +0000912 offset_(offset),
913 index_(index),
914 scale_factor_(scale_factor),
915 needs_null_check_(needs_null_check),
916 use_load_acquire_(use_load_acquire),
Roland Levillain35345a52017-02-27 14:32:08 +0000917 temp_(temp) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100918 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain54f869e2017-03-06 13:54:11 +0000919 DCHECK(kUseBakerReadBarrier);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100920 }
921
922 const char* GetDescription() const OVERRIDE {
Roland Levillain54f869e2017-03-06 13:54:11 +0000923 return "LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64";
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100924 }
925
926 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
927 LocationSummary* locations = instruction_->GetLocations();
928 Register ref_reg = WRegisterFrom(ref_);
929 DCHECK(locations->CanCall());
930 DCHECK(ref_.IsRegister()) << ref_;
931 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
Roland Levillain54f869e2017-03-06 13:54:11 +0000932 DCHECK(obj_.IsW());
933 DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());
934
935 // This slow path is only used by the UnsafeCASObject intrinsic at the moment.
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100936 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
937 << "Unexpected instruction in read barrier marking and field updating slow path: "
938 << instruction_->DebugName();
939 DCHECK(instruction_->GetLocations()->Intrinsified());
940 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
Roland Levillain54f869e2017-03-06 13:54:11 +0000941 DCHECK_EQ(offset_, 0u);
942 DCHECK_EQ(scale_factor_, 0u);
943 DCHECK_EQ(use_load_acquire_, false);
944 // The location of the offset of the marked reference field within `obj_`.
945 Location field_offset = index_;
946 DCHECK(field_offset.IsRegister()) << field_offset;
947
948 // Temporary register `temp_`, used to store the lock word, must
949 // not be IP0 nor IP1, as we may use them to emit the reference
950 // load (in the call to GenerateRawReferenceLoad below), and we
951 // need the lock word to still be in `temp_` after the reference
952 // load.
953 DCHECK_NE(LocationFrom(temp_).reg(), IP0);
954 DCHECK_NE(LocationFrom(temp_).reg(), IP1);
Roland Levillaina1aa3b12016-10-26 13:03:38 +0100955
956 __ Bind(GetEntryLabel());
957
Roland Levillainff487002017-03-07 16:50:01 +0000958 // The implementation is similar to LoadReferenceWithBakerReadBarrierSlowPathARM64's:
959 //
960 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
961 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
962 // HeapReference<mirror::Object> ref = *src; // Original reference load.
963 // bool is_gray = (rb_state == ReadBarrier::GrayState());
964 // if (is_gray) {
965 // old_ref = ref;
966 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
967 // compareAndSwapObject(obj, field_offset, old_ref, ref);
968 // }
969
Roland Levillain54f869e2017-03-06 13:54:11 +0000970 // /* int32_t */ monitor = obj->monitor_
971 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
972 __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
973 if (needs_null_check_) {
974 codegen->MaybeRecordImplicitNullCheck(instruction_);
975 }
976 // /* LockWord */ lock_word = LockWord(monitor)
977 static_assert(sizeof(LockWord) == sizeof(int32_t),
978 "art::LockWord and int32_t have different sizes.");
979
980 // Introduce a dependency on the lock_word including rb_state,
981 // to prevent load-load reordering, and without using
982 // a memory barrier (which would be more expensive).
983 // `obj` is unchanged by this operation, but its value now depends
984 // on `temp`.
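      // (Since the monitor was loaded as a 32-bit value, the upper half of
      // `temp_.X()` is zero, so the value shifted right by 32 is always zero:
      // the Add leaves `obj_` numerically unchanged and only introduces the
      // data dependency.)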
985 __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));
986
987 // The actual reference load.
988 // A possible implicit null check has already been handled above.
989 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
990 arm64_codegen->GenerateRawReferenceLoad(instruction_,
991 ref_,
992 obj_,
993 offset_,
994 index_,
995 scale_factor_,
996 /* needs_null_check */ false,
997 use_load_acquire_);
998
999 // Mark the object `ref` when `obj` is gray.
1000 //
1001 // if (rb_state == ReadBarrier::GrayState())
1002 // ref = ReadBarrier::Mark(ref);
1003 //
1004 // Given the numeric representation, it's enough to check the low bit of the rb_state.
1005 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
1006 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
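      // Tbz branches to the exit label when that bit is clear, i.e. when the
      // object is not gray and no marking or field update is needed.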
1007 __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());
1008
1009 // Save the old value of the reference before marking it.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001010 // Note that we cannot use IP to save the old reference, as IP is
1011 // used internally by the ReadBarrierMarkRegX entry point, and we
1012 // need the old reference after the call to that entry point.
1013 DCHECK_NE(LocationFrom(temp_).reg(), IP0);
1014 __ Mov(temp_.W(), ref_reg);
1015
Roland Levillain54f869e2017-03-06 13:54:11 +00001016 GenerateReadBarrierMarkRuntimeCall(codegen);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001017
1018 // If the new reference is different from the old reference,
Roland Levillain54f869e2017-03-06 13:54:11 +00001019 // update the field in the holder (`*(obj_ + field_offset)`).
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001020 //
1021 // Note that this field could also hold a different object, if
1022 // another thread had concurrently changed it. In that case, the
1023 // LDXR/CMP/BNE sequence of instructions in the compare-and-set
1024 // (CAS) operation below would abort the CAS, leaving the field
1025 // as-is.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001026 __ Cmp(temp_.W(), ref_reg);
Roland Levillain54f869e2017-03-06 13:54:11 +00001027 __ B(eq, GetExitLabel());
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001028
1029 // Update the holder's field atomically. This may fail if the
1030 // mutator updates it before us, but it's OK. This is achieved
1031 // using a strong compare-and-set (CAS) operation with relaxed
1032 // memory synchronization ordering, where the expected value is
1033 // the old reference and the desired value is the new reference.
1034
1035 MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
1036 UseScratchRegisterScope temps(masm);
1037
1038 // Convenience aliases.
1039 Register base = obj_.W();
Roland Levillain54f869e2017-03-06 13:54:11 +00001040 Register offset = XRegisterFrom(field_offset);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001041 Register expected = temp_.W();
1042 Register value = ref_reg;
1043 Register tmp_ptr = temps.AcquireX(); // Pointer to actual memory.
1044 Register tmp_value = temps.AcquireW(); // Value in memory.
1045
1046 __ Add(tmp_ptr, base.X(), Operand(offset));
1047
1048 if (kPoisonHeapReferences) {
1049 arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
1050 if (value.Is(expected)) {
1051 // Do not poison `value`, as it is the same register as
1052 // `expected`, which has just been poisoned.
1053 } else {
1054 arm64_codegen->GetAssembler()->PoisonHeapReference(value);
1055 }
1056 }
1057
1058 // do {
1059 // tmp_value = [tmp_ptr] - expected;
1060 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
1061
Roland Levillain24a4d112016-10-26 13:10:46 +01001062 vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001063 __ Bind(&loop_head);
1064 __ Ldxr(tmp_value, MemOperand(tmp_ptr));
1065 __ Cmp(tmp_value, expected);
Roland Levillain24a4d112016-10-26 13:10:46 +01001066 __ B(&comparison_failed, ne);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001067 __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
1068 __ Cbnz(tmp_value, &loop_head);
Roland Levillain24a4d112016-10-26 13:10:46 +01001069 __ B(&exit_loop);
1070 __ Bind(&comparison_failed);
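      // The comparison failed: clear the exclusive monitor set by LDXR, as the
      // STXR that would normally clear it is not executed.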
1071 __ Clrex();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001072 __ Bind(&exit_loop);
1073
1074 if (kPoisonHeapReferences) {
1075 arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
1076 if (value.Is(expected)) {
1077 // Do not unpoison `value`, as it is the same register as
1078 // `expected`, which has just been unpoisoned.
1079 } else {
1080 arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
1081 }
1082 }
1083
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001084 __ B(GetExitLabel());
1085 }
1086
1087 private:
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001088 // The register containing the object holding the marked object reference field.
1089 const Register obj_;
Roland Levillain54f869e2017-03-06 13:54:11 +00001090 // The offset, index and scale factor to access the reference in `obj_`.
1091 uint32_t offset_;
1092 Location index_;
1093 size_t scale_factor_;
1094 // Is a null check required?
1095 bool needs_null_check_;
1096 // Should this reference load use Load-Acquire semantics?
1097 bool use_load_acquire_;
1098 // A temporary register used to hold the lock word of `obj_`; and
1099 // also to hold the original reference value, when the reference is
1100 // marked.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001101 const Register temp_;
1102
Roland Levillain54f869e2017-03-06 13:54:11 +00001103 DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64);
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001104};
1105
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001106// Slow path generating a read barrier for a heap reference.
1107class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
1108 public:
1109 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
1110 Location out,
1111 Location ref,
1112 Location obj,
1113 uint32_t offset,
1114 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +00001115 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001116 out_(out),
1117 ref_(ref),
1118 obj_(obj),
1119 offset_(offset),
1120 index_(index) {
1121 DCHECK(kEmitCompilerReadBarrier);
1122 // If `obj` is equal to `out` or `ref`, it means the initial object
1123 // has been overwritten by (or after) the heap object reference load
1124 // to be instrumented, e.g.:
1125 //
1126 // __ Ldr(out, HeapOperand(out, class_offset);
Roland Levillain44015862016-01-22 11:47:17 +00001127 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001128 //
1129 // In that case, we have lost the information about the original
1130 // object, and the emitted read barrier cannot work properly.
1131 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
1132 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
1133 }
1134
1135 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1136 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1137 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001138 DataType::Type type = DataType::Type::kReference;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001139 DCHECK(locations->CanCall());
1140 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain3d312422016-06-23 13:53:42 +01001141 DCHECK(instruction_->IsInstanceFieldGet() ||
1142 instruction_->IsStaticFieldGet() ||
1143 instruction_->IsArrayGet() ||
1144 instruction_->IsInstanceOf() ||
1145 instruction_->IsCheckCast() ||
Andreas Gamped9911ee2017-03-27 13:27:24 -07001146 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
Roland Levillain44015862016-01-22 11:47:17 +00001147 << "Unexpected instruction in read barrier for heap reference slow path: "
1148 << instruction_->DebugName();
Roland Levillain19c54192016-11-04 13:44:09 +00001149 // The read barrier instrumentation of object ArrayGet
1150 // instructions does not support the HIntermediateAddress
1151 // instruction.
Roland Levillaincd3d0fb2016-01-15 19:26:48 +00001152 DCHECK(!(instruction_->IsArrayGet() &&
Artem Serov328429f2016-07-06 16:23:04 +01001153 instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001154
1155 __ Bind(GetEntryLabel());
1156
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001157 SaveLiveRegisters(codegen, locations);
1158
1159 // We may have to change the index's value, but as `index_` is a
1160 // constant member (like other "inputs" of this slow path),
1161 // we introduce a copy of it, `index`.
1162 Location index = index_;
1163 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +01001164 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001165 if (instruction_->IsArrayGet()) {
1166 // Compute the actual memory offset and store it in `index`.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001167 Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001168 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
1169 if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
1170 // We are about to change the value of `index_reg` (see the
1171 // calls to vixl::MacroAssembler::Lsl and
1172 // vixl::MacroAssembler::Mov below), but it has
1173 // not been saved by the previous call to
1174 // art::SlowPathCode::SaveLiveRegisters, as it is a
1175 // callee-save register --
1176 // art::SlowPathCode::SaveLiveRegisters does not consider
1177 // callee-save registers, as it has been designed with the
1178 // assumption that callee-save registers are supposed to be
1179 // handled by the called function. So, as a callee-save
1180 // register, `index_reg` _would_ eventually be saved onto
1181 // the stack, but it would be too late: we would have
1182 // changed its value earlier. Therefore, we manually save
1183 // it here into another freely available register,
1184 // `free_reg`, chosen of course among the caller-save
1185 // registers (as a callee-save `free_reg` register would
1186 // exhibit the same problem).
1187 //
1188 // Note we could have requested a temporary register from
1189 // the register allocator instead; but we prefer not to, as
1190 // this is a slow path, and we know we can find a
1191 // caller-save register that is available.
1192 Register free_reg = FindAvailableCallerSaveRegister(codegen);
1193 __ Mov(free_reg.W(), index_reg);
1194 index_reg = free_reg;
1195 index = LocationFrom(index_reg);
1196 } else {
1197 // The initial register stored in `index_` has already been
1198 // saved in the call to art::SlowPathCode::SaveLiveRegisters
1199 // (as it is not a callee-save register), so we can freely
1200 // use it.
1201 }
1202 // Shifting the index value contained in `index_reg` by the scale
1203 // factor (2) cannot overflow in practice, as the runtime is
1204 // unable to allocate object arrays with a size larger than
1205 // 2^26 - 1 (that is, 2^28 - 4 bytes).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001206 __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001207 static_assert(
1208 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
1209 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
1210 __ Add(index_reg, index_reg, Operand(offset_));
1211 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001212 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
1213 // intrinsics, `index_` is not shifted by a scale factor of 2
1214 // (as in the case of ArrayGet), as it is actually an offset
1215 // to an object field within an object.
1216 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001217 DCHECK(instruction_->GetLocations()->Intrinsified());
1218 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
1219 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
1220 << instruction_->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001221 DCHECK_EQ(offset_, 0u);
Roland Levillaina7426c62016-08-03 15:02:10 +01001222 DCHECK(index_.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001223 }
1224 }
1225
1226 // We're moving two or three locations to locations that could
1227 // overlap, so we need a parallel move resolver.
1228 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +01001229 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001230 parallel_move.AddMove(ref_,
1231 LocationFrom(calling_convention.GetRegisterAt(0)),
1232 type,
1233 nullptr);
1234 parallel_move.AddMove(obj_,
1235 LocationFrom(calling_convention.GetRegisterAt(1)),
1236 type,
1237 nullptr);
1238 if (index.IsValid()) {
1239 parallel_move.AddMove(index,
1240 LocationFrom(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001241 DataType::Type::kInt32,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001242 nullptr);
1243 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1244 } else {
1245 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1246 arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
1247 }
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001248 arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001249 instruction_,
1250 instruction_->GetDexPc(),
1251 this);
1252 CheckEntrypointTypes<
1253 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
1254 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1255
1256 RestoreLiveRegisters(codegen, locations);
1257
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001258 __ B(GetExitLabel());
1259 }
1260
1261 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }
1262
1263 private:
1264 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001265 size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
1266 size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001267 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
1268 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
1269 return Register(VIXLRegCodeFromART(i), kXRegSize);
1270 }
1271 }
1272 // We shall never fail to find a free caller-save register, as
1273 // there are more than two core caller-save registers on ARM64
1274 // (meaning it is possible to find one which is different from
1275 // `ref` and `obj`).
1276 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
1277 LOG(FATAL) << "Could not find a free register";
1278 UNREACHABLE();
1279 }
1280
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001281 const Location out_;
1282 const Location ref_;
1283 const Location obj_;
1284 const uint32_t offset_;
1285 // An additional location containing an index to an array.
1286 // Only used for HArrayGet and the UnsafeGetObject &
1287 // UnsafeGetObjectVolatile intrinsics.
1288 const Location index_;
1289
1290 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
1291};
1292
1293// Slow path generating a read barrier for a GC root.
1294class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
1295 public:
1296 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +00001297 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +00001298 DCHECK(kEmitCompilerReadBarrier);
1299 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001300
1301 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1302 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001303 DataType::Type type = DataType::Type::kReference;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001304 DCHECK(locations->CanCall());
1305 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +00001306 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1307 << "Unexpected instruction in read barrier for GC root slow path: "
1308 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001309
1310 __ Bind(GetEntryLabel());
1311 SaveLiveRegisters(codegen, locations);
1312
1313 InvokeRuntimeCallingConvention calling_convention;
1314 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1315 // The argument of the ReadBarrierForRootSlow is not a managed
1316 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
1317 // thus we need a 64-bit move here, and we cannot use
1318 //
1319 // arm64_codegen->MoveLocation(
1320 // LocationFrom(calling_convention.GetRegisterAt(0)),
1321 // root_,
1322 // type);
1323 //
1324 // which would emit a 32-bit move, as `type` is a (32-bit wide)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001325 // reference type (`DataType::Type::kReference`).
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001326 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001327 arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001328 instruction_,
1329 instruction_->GetDexPc(),
1330 this);
1331 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1332 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1333
1334 RestoreLiveRegisters(codegen, locations);
1335 __ B(GetExitLabel());
1336 }
1337
1338 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
1339
1340 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001341 const Location out_;
1342 const Location root_;
1343
1344 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
1345};
1346
Alexandre Rames5319def2014-10-23 10:03:10 +01001347#undef __
1348
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001349Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001350 Location next_location;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001351 if (type == DataType::Type::kVoid) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001352 LOG(FATAL) << "Unreachable type " << type;
1353 }
1354
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001355 if (DataType::IsFloatingPointType(type) &&
Alexandre Rames5319def2014-10-23 10:03:10 +01001356 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001357 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001358 } else if (!DataType::IsFloatingPointType(type) &&
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001359 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
1360 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1361 } else {
1362 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001363 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1364 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001365 }
1366
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001367 // Space on the stack is reserved for all arguments.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001368 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +01001369 return next_location;
1370}
1371
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001372Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001373 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001374}
1375
Serban Constantinescu579885a2015-02-22 20:51:33 +00001376CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001377 const CompilerOptions& compiler_options,
1378 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001379 : CodeGenerator(graph,
1380 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001381 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001382 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001383 callee_saved_core_registers.GetList(),
1384 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001385 compiler_options,
1386 stats),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001387 block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
1388 jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001389 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001390 instruction_visitor_(graph, this),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001391 move_resolver_(graph->GetAllocator(), this),
1392 assembler_(graph->GetAllocator()),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001393 uint32_literals_(std::less<uint32_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001394 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001395 uint64_literals_(std::less<uint64_t>(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001396 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001397 boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001398 method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001399 boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001400 type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001401 boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001402 string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko6fd16062018-06-26 11:02:04 +01001403 boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001404 baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001405 jit_string_patches_(StringReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001406 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001407 jit_class_patches_(TypeReferenceValueComparator(),
Vladimir Markoca6fff82017-10-03 14:49:14 +01001408 graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001409 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001410 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001411}
Alexandre Rames5319def2014-10-23 10:03:10 +01001412
Alexandre Rames67555f72014-11-18 10:55:16 +00001413#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001414
Zheng Xu3927c8b2015-11-18 17:46:25 +08001415void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001416 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001417 jump_table->EmitTable(this);
1418 }
1419}
1420
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001421void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001422 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001423 // Ensure we emit the literal pool.
1424 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001425
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001426 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +00001427
1428 // Verify Baker read barrier linker patches.
1429 if (kIsDebugBuild) {
1430 ArrayRef<const uint8_t> code = allocator->GetMemory();
1431 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
1432 DCHECK(info.label.IsBound());
1433 uint32_t literal_offset = info.label.GetLocation();
1434 DCHECK_ALIGNED(literal_offset, 4u);
1435
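      // Helper that reassembles a 32-bit A64 instruction word from its
      // little-endian bytes in the code buffer.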
1436 auto GetInsn = [&code](uint32_t offset) {
1437 DCHECK_ALIGNED(offset, 4u);
1438 return
1439 (static_cast<uint32_t>(code[offset + 0]) << 0) +
1440 (static_cast<uint32_t>(code[offset + 1]) << 8) +
1441 (static_cast<uint32_t>(code[offset + 2]) << 16) +
1442 (static_cast<uint32_t>(code[offset + 3]) << 24);
1443 };
1444
1445 const uint32_t encoded_data = info.custom_data;
1446 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
1447 // Check that the next instruction matches the expected LDR.
1448 switch (kind) {
1449 case BakerReadBarrierKind::kField: {
1450 DCHECK_GE(code.size() - literal_offset, 8u);
1451 uint32_t next_insn = GetInsn(literal_offset + 4u);
1452 // LDR (immediate) with correct base_reg.
1453 CheckValidReg(next_insn & 0x1fu); // Check destination register.
1454 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1455 CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
1456 break;
1457 }
1458 case BakerReadBarrierKind::kArray: {
1459 DCHECK_GE(code.size() - literal_offset, 8u);
1460 uint32_t next_insn = GetInsn(literal_offset + 4u);
1461 // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
1462 // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
1463 CheckValidReg(next_insn & 0x1fu); // Check destination register.
1464 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1465 CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
1466 CheckValidReg((next_insn >> 16) & 0x1fu); // Check index register.
1467 break;
1468 }
1469 case BakerReadBarrierKind::kGcRoot: {
1470 DCHECK_GE(literal_offset, 4u);
1471 uint32_t prev_insn = GetInsn(literal_offset - 4u);
1472 // LDR (immediate) with correct root_reg.
1473 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
1474 CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg);
1475 break;
1476 }
1477 default:
1478 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
1479 UNREACHABLE();
1480 }
1481 }
1482 }
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001483}
1484
Zheng Xuad4450e2015-04-17 18:48:56 +08001485void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1486 // Note: There are 6 kinds of moves:
1487 // 1. constant -> GPR/FPR (non-cycle)
1488 // 2. constant -> stack (non-cycle)
1489 // 3. GPR/FPR -> GPR/FPR
1490 // 4. GPR/FPR -> stack
1491 // 5. stack -> GPR/FPR
1492 // 6. stack -> stack (non-cycle)
1493 // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4 and 5,
1494 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
1495 // cycles on ARM64, so one GPR and one FPR VIXL temp are always available to resolve the
1496 // dependency.
1497 vixl_temps_.Open(GetVIXLAssembler());
1498}
1499
1500void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1501 vixl_temps_.Close();
1502}
1503
1504Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
Artem Serovd4bccf12017-04-03 18:47:32 +01001505 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
1506 || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
1507 || kind == Location::kSIMDStackSlot);
1508 kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
1509 ? Location::kFpuRegister
1510 : Location::kRegister;
Zheng Xuad4450e2015-04-17 18:48:56 +08001511 Location scratch = GetScratchLocation(kind);
1512 if (!scratch.Equals(Location::NoLocation())) {
1513 return scratch;
1514 }
1515 // Allocate from VIXL temp registers.
1516 if (kind == Location::kRegister) {
1517 scratch = LocationFrom(vixl_temps_.AcquireX());
1518 } else {
Roland Levillain952b2352017-05-03 19:49:14 +01001519 DCHECK_EQ(kind, Location::kFpuRegister);
Artem Serovd4bccf12017-04-03 18:47:32 +01001520 scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
1521 ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
1522 : vixl_temps_.AcquireD());
Zheng Xuad4450e2015-04-17 18:48:56 +08001523 }
1524 AddScratchLocation(scratch);
1525 return scratch;
1526}
1527
1528void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1529 if (loc.IsRegister()) {
1530 vixl_temps_.Release(XRegisterFrom(loc));
1531 } else {
1532 DCHECK(loc.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001533 vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
Zheng Xuad4450e2015-04-17 18:48:56 +08001534 }
1535 RemoveScratchLocation(loc);
1536}
1537
Alexandre Rames3e69f162014-12-10 10:36:50 +00001538void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001539 MoveOperands* move = moves_[index];
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001540 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001541}
1542
Alexandre Rames5319def2014-10-23 10:03:10 +01001543void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001544 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001545 __ Bind(&frame_entry_label_);
1546
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001547 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
1548 UseScratchRegisterScope temps(masm);
1549 Register temp = temps.AcquireX();
1550 __ Ldrh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1551 __ Add(temp, temp, 1);
1552 __ Strh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1553 }
1554
Vladimir Marko33bff252017-11-01 14:35:42 +00001555 bool do_overflow_check =
1556 FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001557 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001558 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001559 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001560 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Vladimir Marko33bff252017-11-01 14:35:42 +00001561 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001562 {
1563 // Ensure that between load and RecordPcInfo there are no pools emitted.
1564 ExactAssemblyScope eas(GetVIXLAssembler(),
1565 kInstructionSize,
1566 CodeBufferCheckScope::kExactSize);
1567 __ ldr(wzr, MemOperand(temp, 0));
1568 RecordPcInfo(nullptr, 0);
1569 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001570 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001571
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001572 if (!HasEmptyFrame()) {
1573 int frame_size = GetFrameSize();
1574 // Stack layout:
1575 // sp[frame_size - 8] : lr.
1576 // ... : other preserved core registers.
1577 // ... : other preserved fp registers.
1578 // ... : reserved frame space.
1579 // sp[0] : current method.
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001580
1581 // Save the current method if we need it. Note that we do not
1582 // do this in HCurrentMethod, as the instruction might have been removed
1583 // in the SSA graph.
1584 if (RequiresCurrentMethod()) {
1585 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001586 } else {
1587 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001588 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001589 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001590 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1591 frame_size - GetCoreSpillSize());
1592 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1593 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001594
1595 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1596 // Initialize should_deoptimize flag to 0.
1597 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1598 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1599 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001600 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01001601
1602 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01001603}
1604
1605void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001606 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001607 if (!HasEmptyFrame()) {
1608 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001609 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1610 frame_size - FrameEntrySpillSize());
1611 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1612 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001613 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001614 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001615 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001616 __ Ret();
1617 GetAssembler()->cfi().RestoreState();
1618 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001619}
1620
Scott Wakeling97c72b72016-06-24 16:19:36 +01001621CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001622 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001623 return CPURegList(CPURegister::kRegister, kXRegSize,
1624 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001625}
1626
Scott Wakeling97c72b72016-06-24 16:19:36 +01001627CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001628 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1629 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001630 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1631 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001632}
1633
Alexandre Rames5319def2014-10-23 10:03:10 +01001634void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1635 __ Bind(GetLabelOf(block));
1636}
1637
Calin Juravle175dc732015-08-25 15:42:32 +01001638void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1639 DCHECK(location.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001640 __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
Calin Juravle175dc732015-08-25 15:42:32 +01001641}
1642
Calin Juravlee460d1d2015-09-29 04:52:17 +01001643void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1644 if (location.IsRegister()) {
1645 locations->AddTemp(location);
1646 } else {
1647 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1648 }
1649}
1650
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001651void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001652 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001653 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001654 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001655 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001656 if (value_can_be_null) {
1657 __ Cbz(value, &done);
1658 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001659 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001660 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
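  // Store the least significant byte of `card` at card_table_base + (object >> kCardShift).
  // The card table base is set up so that this byte equals the dirty-card marker, so no
  // separate kCardDirty immediate needs to be loaded here.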
Serban Constantinescu02164b32014-11-13 14:05:07 +00001661 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001662 if (value_can_be_null) {
1663 __ Bind(&done);
1664 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001665}
1666
David Brazdil58282f42016-01-14 12:45:10 +00001667void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001668 // Blocked core registers:
1669 // lr : Runtime reserved.
1670 // tr : Runtime reserved.
Roland Levillain97c46462017-05-11 14:04:03 +01001671 // mr : Runtime reserved.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001672 // ip1 : VIXL core temp.
1673 // ip0 : VIXL core temp.
1674 //
1675 // Blocked fp registers:
1676 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001677 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1678 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001679 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001680 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001681 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001682
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001683 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001684 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001685 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001686 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001687
David Brazdil58282f42016-01-14 12:45:10 +00001688 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001689 // Stubs do not save callee-save floating point registers. If the graph
1690 // is debuggable, we need to deal with these registers differently. For
1691 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001692 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1693 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001694 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001695 }
1696 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001697}
1698
Alexandre Rames3e69f162014-12-10 10:36:50 +00001699size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1700 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1701 __ Str(reg, MemOperand(sp, stack_index));
1702 return kArm64WordSize;
1703}
1704
1705size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1706 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1707 __ Ldr(reg, MemOperand(sp, stack_index));
1708 return kArm64WordSize;
1709}
1710
1711size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1712 FPRegister reg = FPRegister(reg_id, kDRegSize);
1713 __ Str(reg, MemOperand(sp, stack_index));
1714 return kArm64WordSize;
1715}
1716
1717size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1718 FPRegister reg = FPRegister(reg_id, kDRegSize);
1719 __ Ldr(reg, MemOperand(sp, stack_index));
1720 return kArm64WordSize;
1721}
1722
Alexandre Rames5319def2014-10-23 10:03:10 +01001723void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001724 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001725}
1726
1727void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001728 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001729}
1730
Vladimir Markoa0431112018-06-25 09:32:54 +01001731const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
1732 return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
1733}
1734
Alexandre Rames67555f72014-11-18 10:55:16 +00001735void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001736 if (constant->IsIntConstant()) {
1737 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1738 } else if (constant->IsLongConstant()) {
1739 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1740 } else if (constant->IsNullConstant()) {
1741 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001742 } else if (constant->IsFloatConstant()) {
1743 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1744 } else {
1745 DCHECK(constant->IsDoubleConstant());
1746 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1747 }
1748}
1749
Alexandre Rames3e69f162014-12-10 10:36:50 +00001750
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001751static bool CoherentConstantAndType(Location constant, DataType::Type type) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001752 DCHECK(constant.IsConstant());
1753 HConstant* cst = constant.GetConstant();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001754 return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001755 // Null is mapped to a core W register, which we associate with kInt32.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001756 (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
1757 (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
1758 (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
1759 (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001760}
1761
Roland Levillain952b2352017-05-03 19:49:14 +01001762// Allocate a scratch register from the VIXL pool, querying first
1763// the floating-point register pool, and then the core register
1764// pool. This is essentially a reimplementation of
Roland Levillain558dea12017-01-27 19:40:44 +00001765// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1766// using a different allocation strategy.
1767static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1768 vixl::aarch64::UseScratchRegisterScope* temps,
1769 int size_in_bits) {
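  // Prefer an FP scratch register if one is available; otherwise fall back to a core scratch register.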
1770 return masm->GetScratchFPRegisterList()->IsEmpty()
1771 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1772 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1773}
1774
Calin Juravlee460d1d2015-09-29 04:52:17 +01001775void CodeGeneratorARM64::MoveLocation(Location destination,
1776 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001777 DataType::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001778 if (source.Equals(destination)) {
1779 return;
1780 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001781
1782 // A valid move can always be inferred from the destination and source
1783 // locations. When moving from and to a register, the argument type can be
1784 // used to generate 32bit instead of 64bit moves. In debug mode we also
1785 // check the coherency of the locations and the type.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001786 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001787
1788 if (destination.IsRegister() || destination.IsFpuRegister()) {
1789 if (unspecified_type) {
1790 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1791 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001792 (src_cst != nullptr && (src_cst->IsIntConstant()
1793 || src_cst->IsFloatConstant()
1794 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001795 // For stack slots and 32bit constants, a 32bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001796 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexandre Rames67555f72014-11-18 10:55:16 +00001797 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001798 // If the source is a double stack slot or a 64bit constant, a 64bit
1799 // type is appropriate. Else the source is a register, and since the
1800 // type has not been specified, we choose a 64bit type to force a 64bit
1801 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001802 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexandre Rames67555f72014-11-18 10:55:16 +00001803 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001804 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001805 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1806 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001807 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001808 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1809 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1810 __ Ldr(dst, StackOperandFrom(source));
Artem Serovd4bccf12017-04-03 18:47:32 +01001811 } else if (source.IsSIMDStackSlot()) {
1812 __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001813 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001814 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001815 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001816 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001817 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001818 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001819 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001820 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001821 DataType::Type source_type = DataType::Is64BitType(dst_type)
1822 ? DataType::Type::kInt64
1823 : DataType::Type::kInt32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001824 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1825 }
1826 } else {
1827 DCHECK(source.IsFpuRegister());
1828 if (destination.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001829 DataType::Type source_type = DataType::Is64BitType(dst_type)
1830 ? DataType::Type::kFloat64
1831 : DataType::Type::kFloat32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001832 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1833 } else {
1834 DCHECK(destination.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001835 if (GetGraph()->HasSIMD()) {
1836 __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
1837 } else {
1838 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
1839 }
1840 }
1841 }
1842 } else if (destination.IsSIMDStackSlot()) {
1843 if (source.IsFpuRegister()) {
1844 __ Str(QRegisterFrom(source), StackOperandFrom(destination));
1845 } else {
1846 DCHECK(source.IsSIMDStackSlot());
1847 UseScratchRegisterScope temps(GetVIXLAssembler());
1848 if (GetVIXLAssembler()->GetScratchFPRegisterList()->IsEmpty()) {
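      // No FP scratch register is available: copy the 128-bit slot as two
      // 64-bit halves through a core scratch register.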
1849 Register temp = temps.AcquireX();
1850 __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
1851 __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
1852 __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
1853 __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
1854 } else {
1855 FPRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
1856 __ Ldr(temp, StackOperandFrom(source));
1857 __ Str(temp, StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001858 }
1859 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001860 } else { // The destination is not a register. It must be a stack slot.
1861 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1862 if (source.IsRegister() || source.IsFpuRegister()) {
1863 if (unspecified_type) {
1864 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001865 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001866 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001867 dst_type =
1868 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001869 }
1870 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001871 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1872 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001873 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001874 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001875 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1876 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001877 UseScratchRegisterScope temps(GetVIXLAssembler());
1878 HConstant* src_cst = source.GetConstant();
1879 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001880 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001881 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1882 ? Register(xzr)
1883 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001884 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001885 if (src_cst->IsIntConstant()) {
1886 temp = temps.AcquireW();
1887 } else if (src_cst->IsLongConstant()) {
1888 temp = temps.AcquireX();
1889 } else if (src_cst->IsFloatConstant()) {
1890 temp = temps.AcquireS();
1891 } else {
1892 DCHECK(src_cst->IsDoubleConstant());
1893 temp = temps.AcquireD();
1894 }
1895 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001896 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001897 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001898 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001899 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001900 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001901 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001902 // Use any scratch register (a core or a floating-point one)
1903 // from VIXL scratch register pools as a temporary.
1904 //
1905 // We used to only use the FP scratch register pool, but in some
1906 // rare cases the only register from this pool (D31) would
1907 // already be used (e.g. within a ParallelMove instruction, when
1908 // a move is blocked by another move requiring a scratch FP
1909 // register, which would reserve D31). To prevent this issue, we
1910 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001911 //
1912 // Also, we ask for an FP scratch register first, as the
Roland Levillain952b2352017-05-03 19:49:14 +01001913 // demand for core scratch registers is higher. This is why we
Roland Levillain558dea12017-01-27 19:40:44 +00001914 // use AcquireFPOrCoreCPURegisterOfSize instead of
1915 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1916 // allocates core scratch registers first.
1917 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1918 GetVIXLAssembler(),
1919 &temps,
1920 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001921 __ Ldr(temp, StackOperandFrom(source));
1922 __ Str(temp, StackOperandFrom(destination));
1923 }
1924 }
1925}
1926
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001927void CodeGeneratorARM64::Load(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001928 CPURegister dst,
1929 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001930 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001931 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001932 case DataType::Type::kUint8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001933 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001934 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001935 case DataType::Type::kInt8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001936 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001937 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001938 case DataType::Type::kUint16:
Alexandre Rames67555f72014-11-18 10:55:16 +00001939 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001940 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001941 case DataType::Type::kInt16:
1942 __ Ldrsh(Register(dst), src);
1943 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001944 case DataType::Type::kInt32:
1945 case DataType::Type::kReference:
1946 case DataType::Type::kInt64:
1947 case DataType::Type::kFloat32:
1948 case DataType::Type::kFloat64:
1949 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001950 __ Ldr(dst, src);
1951 break;
Aart Bik66c158e2018-01-31 12:55:04 -08001952 case DataType::Type::kUint32:
1953 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001954 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001955 LOG(FATAL) << "Unreachable type " << type;
1956 }
1957}
1958
Calin Juravle77520bc2015-01-12 18:45:46 +00001959void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001960 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001961 const MemOperand& src,
1962 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001963 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001964 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001965 Register temp_base = temps.AcquireX();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001966 DataType::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001967
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001968 DCHECK(!src.IsPreIndex());
1969 DCHECK(!src.IsPostIndex());
1970
1971 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001972 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001973 {
1974 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1975 MemOperand base = MemOperand(temp_base);
1976 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001977 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001978 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001979 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00001980 {
1981 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1982 __ ldarb(Register(dst), base);
1983 if (needs_null_check) {
1984 MaybeRecordImplicitNullCheck(instruction);
1985 }
1986 }
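  // ldarb zero-extends the loaded byte; sign-extend it explicitly for signed Int8 loads.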
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001987 if (type == DataType::Type::kInt8) {
1988 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
Artem Serov914d7a82017-02-07 14:33:49 +00001989 }
1990 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001991 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001992 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00001993 {
1994 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1995 __ ldarh(Register(dst), base);
1996 if (needs_null_check) {
1997 MaybeRecordImplicitNullCheck(instruction);
1998 }
1999 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002000 if (type == DataType::Type::kInt16) {
2001 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
2002 }
Artem Serov914d7a82017-02-07 14:33:49 +00002003 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002004 case DataType::Type::kInt32:
2005 case DataType::Type::kReference:
2006 case DataType::Type::kInt64:
2007 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00002008 {
2009 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2010 __ ldar(Register(dst), base);
2011 if (needs_null_check) {
2012 MaybeRecordImplicitNullCheck(instruction);
2013 }
2014 }
2015 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002016 case DataType::Type::kFloat32:
2017 case DataType::Type::kFloat64: {
Artem Serov914d7a82017-02-07 14:33:49 +00002018 DCHECK(dst.IsFPRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002019 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002020
Artem Serov914d7a82017-02-07 14:33:49 +00002021 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
2022 {
2023 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2024 __ ldar(temp, base);
2025 if (needs_null_check) {
2026 MaybeRecordImplicitNullCheck(instruction);
2027 }
2028 }
2029 __ Fmov(FPRegister(dst), temp);
2030 break;
Roland Levillain44015862016-01-22 11:47:17 +00002031 }
Aart Bik66c158e2018-01-31 12:55:04 -08002032 case DataType::Type::kUint32:
2033 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002034 case DataType::Type::kVoid:
Artem Serov914d7a82017-02-07 14:33:49 +00002035 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002036 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002037 }
2038}
2039
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002040void CodeGeneratorARM64::Store(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002041 CPURegister src,
2042 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002043 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002044 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002045 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002046 case DataType::Type::kInt8:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002047 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002048 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002049 case DataType::Type::kUint16:
2050 case DataType::Type::kInt16:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002051 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002052 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002053 case DataType::Type::kInt32:
2054 case DataType::Type::kReference:
2055 case DataType::Type::kInt64:
2056 case DataType::Type::kFloat32:
2057 case DataType::Type::kFloat64:
2058 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002059 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00002060 break;
Aart Bik66c158e2018-01-31 12:55:04 -08002061 case DataType::Type::kUint32:
2062 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002063 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002064 LOG(FATAL) << "Unreachable type " << type;
2065 }
2066}
2067
Artem Serov914d7a82017-02-07 14:33:49 +00002068void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002069 DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002070 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00002071 const MemOperand& dst,
2072 bool needs_null_check) {
2073 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002074 UseScratchRegisterScope temps(GetVIXLAssembler());
2075 Register temp_base = temps.AcquireX();
2076
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002077 DCHECK(!dst.IsPreIndex());
2078 DCHECK(!dst.IsPostIndex());
2079
2080 // TODO(vixl): Let the MacroAssembler handle this.
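  // Like the acquire loads, release stores (stlrb/stlrh/stlr) accept only a plain base
  // register, so the full address is computed into temp_base first.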
Andreas Gampe878d58c2015-01-15 23:24:00 -08002081 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01002082 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002083 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00002084 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002085 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002086 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002087 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002088 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00002089 {
2090 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2091 __ stlrb(Register(src), base);
2092 if (needs_null_check) {
2093 MaybeRecordImplicitNullCheck(instruction);
2094 }
2095 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002096 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002097 case DataType::Type::kUint16:
2098 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00002099 {
2100 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2101 __ stlrh(Register(src), base);
2102 if (needs_null_check) {
2103 MaybeRecordImplicitNullCheck(instruction);
2104 }
2105 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002106 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002107 case DataType::Type::kInt32:
2108 case DataType::Type::kReference:
2109 case DataType::Type::kInt64:
2110 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00002111 {
2112 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2113 __ stlr(Register(src), base);
2114 if (needs_null_check) {
2115 MaybeRecordImplicitNullCheck(instruction);
2116 }
2117 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002118 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002119 case DataType::Type::kFloat32:
2120 case DataType::Type::kFloat64: {
2121 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002122 Register temp_src;
2123 if (src.IsZero()) {
2124 // The zero register is used to avoid synthesizing zero constants.
2125 temp_src = Register(src);
2126 } else {
2127 DCHECK(src.IsFPRegister());
2128 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
2129 __ Fmov(temp_src, FPRegister(src));
2130 }
Artem Serov914d7a82017-02-07 14:33:49 +00002131 {
2132 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
2133 __ stlr(temp_src, base);
2134 if (needs_null_check) {
2135 MaybeRecordImplicitNullCheck(instruction);
2136 }
2137 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002138 break;
2139 }
Aart Bik66c158e2018-01-31 12:55:04 -08002140 case DataType::Type::kUint32:
2141 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002142 case DataType::Type::kVoid:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002143 LOG(FATAL) << "Unreachable type " << type;
2144 }
2145}
2146
Calin Juravle175dc732015-08-25 15:42:32 +01002147void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
2148 HInstruction* instruction,
2149 uint32_t dex_pc,
2150 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01002151 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00002152
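  // Runtime entrypoints are function pointers stored in the Thread object: load the
  // target through the thread register (tr) into lr and call it indirectly.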
2153 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
2154 {
2155 // Ensure the pc position is recorded immediately after the `blr` instruction.
2156 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
2157 __ blr(lr);
2158 if (EntrypointRequiresStackMap(entrypoint)) {
2159 RecordPcInfo(instruction, dex_pc, slow_path);
2160 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00002161 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002162}
2163
Roland Levillaindec8f632016-07-22 17:10:06 +01002164void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
2165 HInstruction* instruction,
2166 SlowPathCode* slow_path) {
2167 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01002168 __ Ldr(lr, MemOperand(tr, entry_point_offset));
2169 __ Blr(lr);
2170}
2171
Alexandre Rames67555f72014-11-18 10:55:16 +00002172void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01002173 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002174 UseScratchRegisterScope temps(GetVIXLAssembler());
2175 Register temp = temps.AcquireW();
Vladimir Markodc682aa2018-01-04 18:42:57 +00002176 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
2177 const size_t status_byte_offset =
2178 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
2179 constexpr uint32_t shifted_initialized_value =
2180 enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002181
Serban Constantinescu02164b32014-11-13 14:05:07 +00002182 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002183 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Vladimir Markodc682aa2018-01-04 18:42:57 +00002184 __ Add(temp, class_reg, status_byte_offset);
Igor Murashkin86083f72017-10-27 10:59:04 -07002185 __ Ldarb(temp, HeapOperand(temp));
Vladimir Markodc682aa2018-01-04 18:42:57 +00002186 __ Cmp(temp, shifted_initialized_value);
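  // If the class status is below kInitialized, the class is not yet initialized: take the slow path.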
Vladimir Marko2c64a832018-01-04 11:31:56 +00002187 __ B(lo, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00002188 __ Bind(slow_path->GetExitLabel());
2189}
Alexandre Rames5319def2014-10-23 10:03:10 +01002190
Vladimir Marko175e7862018-03-27 09:03:13 +00002191void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
2192 HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
2193 uint32_t path_to_root = check->GetBitstringPathToRoot();
2194 uint32_t mask = check->GetBitstringMask();
2195 DCHECK(IsPowerOfTwo(mask + 1));
2196 size_t mask_bits = WhichPowerOf2(mask + 1);
2197
2198 if (mask_bits == 16u) {
2199 // Load only the bitstring part of the status word.
2200 __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
2201 } else {
2202 // /* uint32_t */ temp = temp->status_
2203 __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
2204 // Extract the bitstring bits.
2205 __ Ubfx(temp, temp, 0, mask_bits);
2206 }
2207 // Compare the bitstring bits to `path_to_root`.
2208 __ Cmp(temp, path_to_root);
2209}
2210
Roland Levillain44015862016-01-22 11:47:17 +00002211void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002212 BarrierType type = BarrierAll;
2213
2214 switch (kind) {
2215 case MemBarrierKind::kAnyAny:
2216 case MemBarrierKind::kAnyStore: {
2217 type = BarrierAll;
2218 break;
2219 }
2220 case MemBarrierKind::kLoadAny: {
2221 type = BarrierReads;
2222 break;
2223 }
2224 case MemBarrierKind::kStoreStore: {
2225 type = BarrierWrites;
2226 break;
2227 }
2228 default:
2229 LOG(FATAL) << "Unexpected memory barrier " << kind;
2230 }
2231 __ Dmb(InnerShareable, type);
2232}
2233
Serban Constantinescu02164b32014-11-13 14:05:07 +00002234void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
2235 HBasicBlock* successor) {
2236 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002237 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
2238 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01002239 slow_path =
2240 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002241 instruction->SetSlowPath(slow_path);
2242 codegen_->AddSlowPath(slow_path);
2243 if (successor != nullptr) {
2244 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002245 }
2246 } else {
2247 DCHECK_EQ(slow_path->GetSuccessor(), successor);
2248 }
2249
Serban Constantinescu02164b32014-11-13 14:05:07 +00002250 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
2251 Register temp = temps.AcquireW();
2252
Andreas Gampe542451c2016-07-26 09:02:02 -07002253 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
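  // A non-zero flag value means a suspend or checkpoint request is pending and must be
  // handled on the slow path.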
Serban Constantinescu02164b32014-11-13 14:05:07 +00002254 if (successor == nullptr) {
2255 __ Cbnz(temp, slow_path->GetEntryLabel());
2256 __ Bind(slow_path->GetReturnLabel());
2257 } else {
2258 __ Cbz(temp, codegen_->GetLabelOf(successor));
2259 __ B(slow_path->GetEntryLabel());
2260 // slow_path will return to GetLabelOf(successor).
2261 }
2262}
2263
Alexandre Rames5319def2014-10-23 10:03:10 +01002264InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
2265 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08002266 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01002267 assembler_(codegen->GetAssembler()),
2268 codegen_(codegen) {}
2269
Alexandre Rames67555f72014-11-18 10:55:16 +00002270void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002271 DCHECK_EQ(instr->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002272 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002273 DataType::Type type = instr->GetResultType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002274 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002275 case DataType::Type::kInt32:
2276 case DataType::Type::kInt64:
Alexandre Rames5319def2014-10-23 10:03:10 +01002277 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002278 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002279 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002280 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002281
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002282 case DataType::Type::kFloat32:
2283 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002284 locations->SetInAt(0, Location::RequiresFpuRegister());
2285 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002286 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002287 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002288
Alexandre Rames5319def2014-10-23 10:03:10 +01002289 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002290 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002291 }
2292}
2293
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002294void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
2295 const FieldInfo& field_info) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002296 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2297
2298 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002299 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Rames09a99962015-04-15 11:47:56 +01002300 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002301 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2302 object_field_get_with_read_barrier
2303 ? LocationSummary::kCallOnSlowPath
2304 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002305 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002306 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillaind0b51832017-01-26 19:04:23 +00002307 // We need a temporary register for the read barrier marking slow
2308 // path in CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002309 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
2310 !Runtime::Current()->UseJitCompilation() &&
2311 !field_info.IsVolatile()) {
2312 // If link-time thunks for the Baker read barrier are enabled, for AOT
2313 // non-volatile loads we need a temporary only if the offset is too big.
2314 if (field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
2315 locations->AddTemp(FixedTempLocation());
2316 }
2317 } else {
2318 locations->AddTemp(Location::RequiresRegister());
2319 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002320 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002321 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002322 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002323 locations->SetOut(Location::RequiresFpuRegister());
2324 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002325 // The output overlaps for an object field get when read barriers
2326 // are enabled: we do not want the load to overwrite the object's
2327 // location, as we need it to emit the read barrier.
2328 locations->SetOut(
2329 Location::RequiresRegister(),
2330 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01002331 }
2332}
2333
2334void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
2335 const FieldInfo& field_info) {
2336 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00002337 LocationSummary* locations = instruction->GetLocations();
2338 Location base_loc = locations->InAt(0);
2339 Location out = locations->Out();
2340 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Vladimir Marko61b92282017-10-11 13:23:17 +01002341 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
2342 DataType::Type load_type = instruction->GetType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002343 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01002344
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002345 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
Vladimir Marko61b92282017-10-11 13:23:17 +01002346 load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002347 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002348 // /* HeapReference<Object> */ out = *(base + offset)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002349 Register base = RegisterFrom(base_loc, DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002350 Location maybe_temp =
2351 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
Roland Levillain44015862016-01-22 11:47:17 +00002352 // Note that potential implicit null checks are handled in this
2353 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
2354 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2355 instruction,
2356 out,
2357 base,
2358 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002359 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00002360 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002361 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00002362 } else {
2363 // General case.
2364 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002365 // Note that a potential implicit null check is handled in this
2366 // CodeGeneratorARM64::LoadAcquire call.
2367 // NB: LoadAcquire will record the pc info if needed.
2368 codegen_->LoadAcquire(
2369 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01002370 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002371 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2372 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Vladimir Marko61b92282017-10-11 13:23:17 +01002373 codegen_->Load(load_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01002374 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01002375 }
Vladimir Marko61b92282017-10-11 13:23:17 +01002376 if (load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002377 // If read barriers are enabled, emit read barriers other than
2378 // Baker's using a slow path (and also unpoison the loaded
2379 // reference, if heap poisoning is enabled).
2380 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
2381 }
Roland Levillain4d027112015-07-01 15:41:14 +01002382 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002383}
2384
2385void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
2386 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002387 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01002388 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002389 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
2390 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002391 } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002392 locations->SetInAt(1, Location::RequiresFpuRegister());
2393 } else {
2394 locations->SetInAt(1, Location::RequiresRegister());
2395 }
2396}
2397
2398void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002399 const FieldInfo& field_info,
2400 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002401 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2402
2403 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002404 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01002405 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01002406 Offset offset = field_info.GetFieldOffset();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002407 DataType::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002408
Roland Levillain4d027112015-07-01 15:41:14 +01002409 {
2410 // We use a block to end the scratch scope before the write barrier, thus
2411 // freeing the temporary registers so they can be used in `MarkGCCard`.
2412 UseScratchRegisterScope temps(GetVIXLAssembler());
2413
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002414 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01002415 DCHECK(value.IsW());
2416 Register temp = temps.AcquireW();
2417 __ Mov(temp, value.W());
2418 GetAssembler()->PoisonHeapReference(temp.W());
2419 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01002420 }
Roland Levillain4d027112015-07-01 15:41:14 +01002421
2422 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00002423 codegen_->StoreRelease(
2424 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01002425 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002426 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2427 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002428 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2429 codegen_->MaybeRecordImplicitNullCheck(instruction);
2430 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002431 }
2432
2433 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002434 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002435 }
2436}
2437
Alexandre Rames67555f72014-11-18 10:55:16 +00002438void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002439 DataType::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002440
2441 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002442 case DataType::Type::kInt32:
2443 case DataType::Type::kInt64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002444 Register dst = OutputRegister(instr);
2445 Register lhs = InputRegisterAt(instr, 0);
2446 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002447 if (instr->IsAdd()) {
2448 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002449 } else if (instr->IsAnd()) {
2450 __ And(dst, lhs, rhs);
2451 } else if (instr->IsOr()) {
2452 __ Orr(dst, lhs, rhs);
2453 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002454 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002455 } else if (instr->IsRor()) {
2456 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002457 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002458 __ Ror(dst, lhs, shift);
2459 } else {
2460        // Ensure the shift distance is in a register of the same size as the result. If
2461        // we are rotating a long and the shift distance arrives in a W register, we do
2462        // not need to sxtw it for use as an X register, since shift distances are
2463        // always masked with (reg_bits - 1).
2464 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
2465 }
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002466 } else if (instr->IsMin() || instr->IsMax()) {
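        // Integer min/max: compare the operands, then pick one with a conditional select.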
2467 __ Cmp(lhs, rhs);
2468 __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
Alexandre Rames67555f72014-11-18 10:55:16 +00002469 } else {
2470 DCHECK(instr->IsXor());
2471 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002472 }
2473 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002474 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002475 case DataType::Type::kFloat32:
2476 case DataType::Type::kFloat64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002477 FPRegister dst = OutputFPRegister(instr);
2478 FPRegister lhs = InputFPRegisterAt(instr, 0);
2479 FPRegister rhs = InputFPRegisterAt(instr, 1);
2480 if (instr->IsAdd()) {
2481 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002482 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002483 __ Fsub(dst, lhs, rhs);
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002484 } else if (instr->IsMin()) {
2485 __ Fmin(dst, lhs, rhs);
2486 } else if (instr->IsMax()) {
2487 __ Fmax(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002488 } else {
2489 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002490 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002491 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002492 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002493 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002494 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002495 }
2496}
2497
Serban Constantinescu02164b32014-11-13 14:05:07 +00002498void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2499 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2500
Vladimir Markoca6fff82017-10-03 14:49:14 +01002501 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002502 DataType::Type type = instr->GetResultType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002503 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002504 case DataType::Type::kInt32:
2505 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002506 locations->SetInAt(0, Location::RequiresRegister());
2507 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Artem Serov87c97052016-09-23 13:34:31 +01002508 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002509 break;
2510 }
2511 default:
2512 LOG(FATAL) << "Unexpected shift type " << type;
2513 }
2514}
2515
2516void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2517 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2518
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002519 DataType::Type type = instr->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002520 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002521 case DataType::Type::kInt32:
2522 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002523 Register dst = OutputRegister(instr);
2524 Register lhs = InputRegisterAt(instr, 0);
2525 Operand rhs = InputOperandAt(instr, 1);
2526 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002527 uint32_t shift_value = rhs.GetImmediate() &
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002528 (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002529 if (instr->IsShl()) {
2530 __ Lsl(dst, lhs, shift_value);
2531 } else if (instr->IsShr()) {
2532 __ Asr(dst, lhs, shift_value);
2533 } else {
2534 __ Lsr(dst, lhs, shift_value);
2535 }
2536 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002537 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002538
2539 if (instr->IsShl()) {
2540 __ Lsl(dst, lhs, rhs_reg);
2541 } else if (instr->IsShr()) {
2542 __ Asr(dst, lhs, rhs_reg);
2543 } else {
2544 __ Lsr(dst, lhs, rhs_reg);
2545 }
2546 }
2547 break;
2548 }
2549 default:
2550 LOG(FATAL) << "Unexpected shift operation type " << type;
2551 }
2552}
2553
Alexandre Rames5319def2014-10-23 10:03:10 +01002554void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002555 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002556}
2557
2558void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002559 HandleBinaryOp(instruction);
2560}
2561
2562void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2563 HandleBinaryOp(instruction);
2564}
2565
2566void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2567 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002568}
2569
Artem Serov7fc63502016-02-09 17:15:29 +00002570void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002571 DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002572 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002573 locations->SetInAt(0, Location::RequiresRegister());
2574  // There are no immediate variants of the negated bitwise instructions in AArch64.
2575 locations->SetInAt(1, Location::RequiresRegister());
2576 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2577}
2578
Artem Serov7fc63502016-02-09 17:15:29 +00002579void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002580 Register dst = OutputRegister(instr);
2581 Register lhs = InputRegisterAt(instr, 0);
2582 Register rhs = InputRegisterAt(instr, 1);
2583
2584 switch (instr->GetOpKind()) {
2585 case HInstruction::kAnd:
2586 __ Bic(dst, lhs, rhs);
2587 break;
2588 case HInstruction::kOr:
2589 __ Orn(dst, lhs, rhs);
2590 break;
2591 case HInstruction::kXor:
2592 __ Eon(dst, lhs, rhs);
2593 break;
2594 default:
2595 LOG(FATAL) << "Unreachable";
2596 }
2597}
2598
Anton Kirilov74234da2017-01-13 14:42:47 +00002599void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2600 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002601 DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
2602 instruction->GetType() == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002603 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002604 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames8626b742015-11-25 16:28:08 +00002605 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2606 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2607 } else {
2608 locations->SetInAt(0, Location::RequiresRegister());
2609 }
2610 locations->SetInAt(1, Location::RequiresRegister());
2611 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2612}
2613
Anton Kirilov74234da2017-01-13 14:42:47 +00002614void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2615 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002616 DataType::Type type = instruction->GetType();
Alexandre Rames8626b742015-11-25 16:28:08 +00002617 HInstruction::InstructionKind kind = instruction->GetInstrKind();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002618 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002619 Register out = OutputRegister(instruction);
2620 Register left;
2621 if (kind != HInstruction::kNeg) {
2622 left = InputRegisterAt(instruction, 0);
2623 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002624  // If this `HDataProcWithShifterOp` was created by merging a type conversion into the
Alexandre Rames8626b742015-11-25 16:28:08 +00002625  // shifter operand operation, the IR that produces `right_reg` (the input to the type
2626 // conversion) can have a different type from the current instruction's type,
2627 // so we manually indicate the type.
2628 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002629 Operand right_operand(0);
2630
Anton Kirilov74234da2017-01-13 14:42:47 +00002631 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2632 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002633 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2634 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002635 right_operand = Operand(right_reg,
2636 helpers::ShiftFromOpKind(op_kind),
2637 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002638 }
2639
2640 // Logical binary operations do not support extension operations in the
2641  // operand. Note that VIXL would still cope if such an operand were passed, by
2642  // generating the extension as a separate instruction.
2643 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2644 DCHECK(!right_operand.IsExtendedRegister() ||
2645 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2646 kind != HInstruction::kNeg));
2647 switch (kind) {
2648 case HInstruction::kAdd:
2649 __ Add(out, left, right_operand);
2650 break;
2651 case HInstruction::kAnd:
2652 __ And(out, left, right_operand);
2653 break;
2654 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002655 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002656 __ Neg(out, right_operand);
2657 break;
2658 case HInstruction::kOr:
2659 __ Orr(out, left, right_operand);
2660 break;
2661 case HInstruction::kSub:
2662 __ Sub(out, left, right_operand);
2663 break;
2664 case HInstruction::kXor:
2665 __ Eor(out, left, right_operand);
2666 break;
2667 default:
2668 LOG(FATAL) << "Unexpected operation kind: " << kind;
2669 UNREACHABLE();
2670 }
2671}
2672
Artem Serov328429f2016-07-06 16:23:04 +01002673void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002674 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002675 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002676 locations->SetInAt(0, Location::RequiresRegister());
2677 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
Artem Serov87c97052016-09-23 13:34:31 +01002678 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002679}
2680
Roland Levillain19c54192016-11-04 13:44:09 +00002681void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002682 __ Add(OutputRegister(instruction),
2683 InputRegisterAt(instruction, 0),
2684 Operand(InputOperandAt(instruction, 1)));
2685}
2686
Artem Serove1811ed2017-04-27 16:50:47 +01002687void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
2688 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002689 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serove1811ed2017-04-27 16:50:47 +01002690
2691 HIntConstant* shift = instruction->GetShift()->AsIntConstant();
2692
2693 locations->SetInAt(0, Location::RequiresRegister());
2694  // For the byte case we do not need to shift the index variable, so we can encode the data
2695  // offset into the ADD instruction. For other cases we prefer the data_offset to be in a
2696  // register; that hoists the data offset constant generation out of the loop and reduces
2697  // the critical path length in the loop.
2698 locations->SetInAt(1, shift->GetValue() == 0
2699 ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
2700 : Location::RequiresRegister());
2701 locations->SetInAt(2, Location::ConstantLocation(shift));
2702 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2703}
2704
2705void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
2706 HIntermediateAddressIndex* instruction) {
2707 Register index_reg = InputRegisterAt(instruction, 0);
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002708 uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
Artem Serove1811ed2017-04-27 16:50:47 +01002709 uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();
2710
2711 if (shift == 0) {
2712 __ Add(OutputRegister(instruction), index_reg, offset);
2713 } else {
2714 Register offset_reg = InputRegisterAt(instruction, 1);
2715 __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
2716 }
2717}
2718
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002719void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002720 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002721 new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002722 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2723 if (instr->GetOpKind() == HInstruction::kSub &&
2724 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002725 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002726 // Don't allocate register for Mneg instruction.
2727 } else {
2728 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2729 Location::RequiresRegister());
2730 }
2731 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2732 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002733 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2734}
2735
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002736void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002737 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002738 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2739 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002740
2741 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2742 // This fixup should be carried out for all multiply-accumulate instructions:
2743 // madd, msub, smaddl, smsubl, umaddl and umsubl.
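  // The erratum can be triggered when a 64-bit integer multiply-accumulate immediately
  // follows a load or store, so a nop is emitted between the two instructions when needed.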
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002744 if (instr->GetType() == DataType::Type::kInt64 &&
Alexandre Rames418318f2015-11-20 15:55:47 +00002745 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2746 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002747 vixl::aarch64::Instruction* prev =
2748 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002749 if (prev->IsLoadOrStore()) {
2750      // Make sure we emit exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002751 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002752 __ nop();
2753 }
2754 }
2755
2756 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002757 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002758 __ Madd(res, mul_left, mul_right, accumulator);
2759 } else {
2760 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002761 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002762 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002763 __ Mneg(res, mul_left, mul_right);
2764 } else {
2765 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2766 __ Msub(res, mul_left, mul_right, accumulator);
2767 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002768 }
2769}
2770
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002771void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002772 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002773 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002774 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002775 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2776 object_array_get_with_read_barrier
2777 ? LocationSummary::kCallOnSlowPath
2778 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002779 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002780 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillain54f869e2017-03-06 13:54:11 +00002781 // We need a temporary register for the read barrier marking slow
2782 // path in CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002783 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
2784 !Runtime::Current()->UseJitCompilation() &&
2785 instruction->GetIndex()->IsConstant()) {
2786 // Array loads with constant index are treated as field loads.
2787 // If link-time thunks for the Baker read barrier are enabled, for AOT
2788 // constant index loads we need a temporary only if the offset is too big.
2789 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
2790 uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002791 offset += index << DataType::SizeShift(DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002792 if (offset >= kReferenceLoadMinFarOffset) {
2793 locations->AddTemp(FixedTempLocation());
2794 }
2795 } else {
2796 locations->AddTemp(Location::RequiresRegister());
2797 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002798 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002799 locations->SetInAt(0, Location::RequiresRegister());
2800 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002801 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002802 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2803 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002804 // The output overlaps in the case of an object array get with
2805 // read barriers enabled: we do not want the move to overwrite the
2806 // array's location, as we need it to emit the read barrier.
2807 locations->SetOut(
2808 Location::RequiresRegister(),
2809 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002810 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002811}
2812
2813void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002814 DataType::Type type = instruction->GetType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002815 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002816 LocationSummary* locations = instruction->GetLocations();
2817 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002818 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002819 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002820 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2821 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002822 MacroAssembler* masm = GetVIXLAssembler();
2823 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002824
Roland Levillain19c54192016-11-04 13:44:09 +00002825 // The read barrier instrumentation of object ArrayGet instructions
2826 // does not support the HIntermediateAddress instruction.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002827 DCHECK(!((type == DataType::Type::kReference) &&
Roland Levillain19c54192016-11-04 13:44:09 +00002828 instruction->GetArray()->IsIntermediateAddress() &&
2829 kEmitCompilerReadBarrier));
2830
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002831 if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00002832 // Object ArrayGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002833 // Note that a potential implicit null check is handled in the
2834 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
Vladimir Marko66d691d2017-04-07 17:53:39 +01002835 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002836 if (index.IsConstant()) {
2837 // Array load with a constant index can be treated as a field load.
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002838 offset += Int64FromLocation(index) << DataType::SizeShift(type);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002839 Location maybe_temp =
2840 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
2841 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2842 out,
2843 obj.W(),
2844 offset,
2845 maybe_temp,
Vladimir Marko66d691d2017-04-07 17:53:39 +01002846 /* needs_null_check */ false,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002847 /* use_load_acquire */ false);
2848 } else {
2849 Register temp = WRegisterFrom(locations->GetTemp(0));
2850 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko66d691d2017-04-07 17:53:39 +01002851 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ false);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002852 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002853 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002854 // General case.
2855 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002856 Register length;
2857 if (maybe_compressed_char_at) {
2858 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2859 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002860 {
2861 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2862 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2863
2864 if (instruction->GetArray()->IsIntermediateAddress()) {
2865 DCHECK_LT(count_offset, offset);
2866 int64_t adjusted_offset =
2867 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2868 // Note that `adjusted_offset` is negative, so this will be a LDUR.
2869 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2870 } else {
2871 __ Ldr(length, HeapOperand(obj, count_offset));
2872 }
2873 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002874 }
jessicahandojo05765752016-09-09 19:01:32 -07002875 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002876 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002877 if (maybe_compressed_char_at) {
2878 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002879 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2880 "Expecting 0=compressed, 1=uncompressed");
2881 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002882 __ Ldrb(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002883 HeapOperand(obj, offset + Int64FromLocation(index)));
jessicahandojo05765752016-09-09 19:01:32 -07002884 __ B(&done);
2885 __ Bind(&uncompressed_load);
2886 __ Ldrh(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002887 HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
jessicahandojo05765752016-09-09 19:01:32 -07002888 __ Bind(&done);
2889 } else {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002890 offset += Int64FromLocation(index) << DataType::SizeShift(type);
jessicahandojo05765752016-09-09 19:01:32 -07002891 source = HeapOperand(obj, offset);
2892 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002893 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002894 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002895 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002896 // We do not need to compute the intermediate address from the array: the
2897 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002898 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002899 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002900 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002901 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2902 }
2903 temp = obj;
2904 } else {
2905 __ Add(temp, obj, offset);
2906 }
jessicahandojo05765752016-09-09 19:01:32 -07002907 if (maybe_compressed_char_at) {
2908 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002909 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2910 "Expecting 0=compressed, 1=uncompressed");
2911 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002912 __ Ldrb(Register(OutputCPURegister(instruction)),
2913 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2914 __ B(&done);
2915 __ Bind(&uncompressed_load);
2916 __ Ldrh(Register(OutputCPURegister(instruction)),
2917 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2918 __ Bind(&done);
2919 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002920 source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
jessicahandojo05765752016-09-09 19:01:32 -07002921 }
Roland Levillain44015862016-01-22 11:47:17 +00002922 }
jessicahandojo05765752016-09-09 19:01:32 -07002923 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002924 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2925 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002926 codegen_->Load(type, OutputCPURegister(instruction), source);
2927 codegen_->MaybeRecordImplicitNullCheck(instruction);
2928 }
Roland Levillain44015862016-01-22 11:47:17 +00002929
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002930 if (type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002931 static_assert(
2932 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2933 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2934 Location obj_loc = locations->InAt(0);
2935 if (index.IsConstant()) {
2936 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2937 } else {
2938 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2939 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002940 }
Roland Levillain4d027112015-07-01 15:41:14 +01002941 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002942}
2943
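// Illustrative sketch, not from the upstream sources: what the compressed-string
// charAt() path above computes, restated in plain C++. `count_field` stands for the
// packed String count word whose bit 0 is the compression flag (0 = compressed,
// 1 = uncompressed, matching the static_assert above); the helper name and parameters
// are hypothetical.
static inline uint16_t CharAtSketch(const uint8_t* data, uint32_t count_field, size_t index) {
  if ((count_field & 1u) == 0u) {
    // Compressed string: one byte per character (the Ldrb with the index scaled by 1).
    return data[index];
  }
  // Uncompressed string: one uint16_t per character (the Ldrh with the index scaled by 2).
  return reinterpret_cast<const uint16_t*>(data)[index];
}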
Alexandre Rames5319def2014-10-23 10:03:10 +01002944void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002945 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002946 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002947 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002948}
2949
2950void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002951 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002952 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002953 {
2954 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2955 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2956 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2957 codegen_->MaybeRecordImplicitNullCheck(instruction);
2958 }
jessicahandojo05765752016-09-09 19:01:32 -07002959 // Mask out compression flag from String's array length.
2960 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002961 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002962 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002963}
2964
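// Illustrative sketch, not from the upstream sources: with string compression enabled,
// the String count word packs the character count and the compression flag as
// (length << 1) | flag, with flag 0 = compressed and 1 = uncompressed, so the length is
// recovered with the single Lsr #1 emitted above. The helper name is hypothetical.
static inline uint32_t StringLengthFromCountSketch(uint32_t count_field) {
  return count_field >> 1;  // Drop the compression flag held in bit 0.
}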
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002965void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002966 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002967
2968 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002969 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002970 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002971 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002972 LocationSummary::kCallOnSlowPath :
2973 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002974 locations->SetInAt(0, Location::RequiresRegister());
2975 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002976 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2977 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002978 } else if (DataType::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002979 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002980 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002981 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002982 }
2983}
2984
2985void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002986 DataType::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002987 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002988 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002989 bool needs_write_barrier =
2990 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002991
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002992 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002993 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002994 CPURegister source = value;
2995 Location index = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002996 size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002997 MemOperand destination = HeapOperand(array);
2998 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002999
3000 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003001 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003002 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003003 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003004 destination = HeapOperand(array, offset);
3005 } else {
3006 UseScratchRegisterScope temps(masm);
3007 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01003008 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01003009 // We do not need to compute the intermediate address from the array: the
3010 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01003011 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01003012 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01003013 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01003014          DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
3015 }
3016 temp = array;
3017 } else {
3018 __ Add(temp, array, offset);
3019 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003020 destination = HeapOperand(temp,
3021 XRegisterFrom(index),
3022 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003023 DataType::SizeShift(value_type));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003024 }
Artem Serov914d7a82017-02-07 14:33:49 +00003025 {
3026 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
3027 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3028 codegen_->Store(value_type, value, destination);
3029 codegen_->MaybeRecordImplicitNullCheck(instruction);
3030 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003031 } else {
Artem Serov328429f2016-07-06 16:23:04 +01003032 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01003033 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003034 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01003035 {
3036 // We use a block to end the scratch scope before the write barrier, thus
3037 // freeing the temporary registers so they can be used in `MarkGCCard`.
3038 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003039 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01003040 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003041 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003042 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01003043 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01003044 destination = HeapOperand(temp,
3045 XRegisterFrom(index),
3046 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003047 DataType::SizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01003048 }
3049
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003050 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3051 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3052 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3053
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003054 if (may_need_runtime_call_for_type_check) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01003055 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003056 codegen_->AddSlowPath(slow_path);
3057 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01003058 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003059 __ Cbnz(Register(value), &non_zero);
3060 if (!index.IsConstant()) {
3061 __ Add(temp, array, offset);
3062 }
Artem Serov914d7a82017-02-07 14:33:49 +00003063 {
3064 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
3065 // emitted.
3066 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3067 __ Str(wzr, destination);
3068 codegen_->MaybeRecordImplicitNullCheck(instruction);
3069 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003070 __ B(&done);
3071 __ Bind(&non_zero);
3072 }
3073
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003074 // Note that when Baker read barriers are enabled, the type
3075 // checks are performed without read barriers. This is fine,
3076 // even in the case where a class object is in the from-space
3077 // after the flip, as a comparison involving such a type would
3078 // not produce a false positive; it may of course produce a
3079 // false negative, in which case we would take the ArraySet
3080 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01003081
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003082 Register temp2 = temps.AcquireSameSizeAs(array);
3083 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00003084 {
3085 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
3086 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3087 __ Ldr(temp, HeapOperand(array, class_offset));
3088 codegen_->MaybeRecordImplicitNullCheck(instruction);
3089 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003090 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01003091
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003092 // /* HeapReference<Class> */ temp = temp->component_type_
3093 __ Ldr(temp, HeapOperand(temp, component_offset));
3094 // /* HeapReference<Class> */ temp2 = value->klass_
3095 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
3096 // If heap poisoning is enabled, no need to unpoison `temp`
3097 // nor `temp2`, as we are comparing two poisoned references.
3098 __ Cmp(temp, temp2);
3099 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01003100
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003101 if (instruction->StaticTypeOfArrayIsObjectArray()) {
3102 vixl::aarch64::Label do_put;
3103 __ B(eq, &do_put);
3104 // If heap poisoning is enabled, the `temp` reference has
3105 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003106 GetAssembler()->MaybeUnpoisonHeapReference(temp);
3107
Roland Levillain9d6e1f82016-09-05 15:57:33 +01003108 // /* HeapReference<Class> */ temp = temp->super_class_
3109 __ Ldr(temp, HeapOperand(temp, super_offset));
3110 // If heap poisoning is enabled, no need to unpoison
3111 // `temp`, as we are comparing against null below.
3112 __ Cbnz(temp, slow_path->GetEntryLabel());
3113 __ Bind(&do_put);
3114 } else {
3115 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003116 }
3117 }
3118
3119 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01003120 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003121 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01003122 __ Mov(temp2, value.W());
3123 GetAssembler()->PoisonHeapReference(temp2);
3124 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003125 }
3126
3127 if (!index.IsConstant()) {
3128 __ Add(temp, array, offset);
Vladimir Markod1ef8732017-04-18 13:55:13 +01003129 } else {
3130 // We no longer need the `temp` here so release it as the store below may
3131 // need a scratch register (if the constant index makes the offset too large)
3132 // and the poisoned `source` could be using the other scratch register.
3133 temps.Release(temp);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003134 }
Artem Serov914d7a82017-02-07 14:33:49 +00003135 {
3136 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
3137 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3138 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003139
Artem Serov914d7a82017-02-07 14:33:49 +00003140 if (!may_need_runtime_call_for_type_check) {
3141 codegen_->MaybeRecordImplicitNullCheck(instruction);
3142 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003143 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003144 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01003145
3146 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
3147
3148 if (done.IsLinked()) {
3149 __ Bind(&done);
3150 }
3151
3152 if (slow_path != nullptr) {
3153 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01003154 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003155 }
3156}
3157
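// Illustrative sketch, not from the upstream sources: a summary of the fast-path type
// check emitted above for a reference array store; ArraySetSlowPathARM64 performs the
// full assignability check when this fast path fails. Poisoned references can be compared
// directly because poisoning preserves equality. All names are hypothetical; `component`
// and `value_class` stand for the HeapReference<Class> values held in `temp` and `temp2`.
static inline bool NeedsArraySetSlowPathSketch(const void* component,
                                               const void* value_class,
                                               bool array_is_object_array,
                                               const void* component_super_class) {
  if (component == value_class) {
    return false;  // Exact type match: take the do_put path.
  }
  if (array_is_object_array) {
    // The array's static type is Object[]: the store is still fine if the actual
    // component type is java.lang.Object, i.e. it has no super class (the Cbnz above).
    return component_super_class != nullptr;
  }
  return true;  // Types differ and no further fast-path check applies (the B ne above).
}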
Alexandre Rames67555f72014-11-18 10:55:16 +00003158void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003159 RegisterSet caller_saves = RegisterSet::Empty();
3160 InvokeRuntimeCallingConvention calling_convention;
3161 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3162 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
3163 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00003164 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00003165 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00003166}
3167
3168void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01003169 BoundsCheckSlowPathARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003170 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003171 codegen_->AddSlowPath(slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00003172 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
3173 __ B(slow_path->GetEntryLabel(), hs);
3174}
3175
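// Illustrative sketch, not from the upstream sources: the single unsigned comparison
// above (B ..., hs, i.e. unsigned >=) covers both failure cases at once, because a
// negative index reinterpreted as unsigned is larger than any valid array length. The
// same check in plain C++, with a hypothetical helper name:
static inline bool IndexOutOfBoundsSketch(int32_t index, int32_t length) {
  return static_cast<uint32_t>(index) >= static_cast<uint32_t>(length);
}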
Alexandre Rames67555f72014-11-18 10:55:16 +00003176void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
3177 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003178 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexandre Rames67555f72014-11-18 10:55:16 +00003179 locations->SetInAt(0, Location::RequiresRegister());
3180 if (check->HasUses()) {
3181 locations->SetOut(Location::SameAsFirstInput());
3182 }
3183}
3184
3185void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
3186 // We assume the class is not null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01003187 SlowPathCodeARM64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(
Alexandre Rames67555f72014-11-18 10:55:16 +00003188 check->GetLoadClass(), check, check->GetDexPc(), true);
3189 codegen_->AddSlowPath(slow_path);
3190 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
3191}
3192
Roland Levillain1a653882016-03-18 18:05:57 +00003193static bool IsFloatingPointZeroConstant(HInstruction* inst) {
3194 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
3195 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
3196}
3197
3198void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
3199 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
3200 Location rhs_loc = instruction->GetLocations()->InAt(1);
3201 if (rhs_loc.IsConstant()) {
3202 // 0.0 is the only immediate that can be encoded directly in
3203 // an FCMP instruction.
3204 //
3205 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
3206 // specify that in a floating-point comparison, positive zero
3207 // and negative zero are considered equal, so we can use the
3208 // literal 0.0 for both cases here.
3209 //
3210    // Note however that some methods (Float.equals, Float.compare,
3211    // Float.compareTo, Double.equals, Double.compare,
3212 // Double.compareTo, Math.max, Math.min, StrictMath.max,
3213 // StrictMath.min) consider 0.0 to be (strictly) greater than
3214 // -0.0. So if we ever translate calls to these methods into a
3215 // HCompare instruction, we must handle the -0.0 case with
3216 // care here.
3217 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
3218 __ Fcmp(lhs_reg, 0.0);
3219 } else {
3220 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
3221 }
Roland Levillain7f63c522015-07-13 15:54:55 +00003222}
3223
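// Illustrative note, not from the upstream sources: the 0.0 literal in the Fcmp above is
// safe because IEEE-754 (and JLS 15.20.1) comparisons treat the two zeros as equal:
static_assert(0.0f == -0.0f, "positive and negative zero compare equal in floating point");
// The library methods listed in the comment above (e.g. Float.compare) do order the two
// zeros, which is why they could not be lowered to a comparison against a plain 0.0
// literal without extra care.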
Serban Constantinescu02164b32014-11-13 14:05:07 +00003224void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003225 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003226 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003227 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01003228 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003229 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003230 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003231 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003232 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003233 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003234 case DataType::Type::kInt32:
3235 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003236 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00003237 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00003238 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3239 break;
3240 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003241 case DataType::Type::kFloat32:
3242 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003243 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00003244 locations->SetInAt(1,
3245 IsFloatingPointZeroConstant(compare->InputAt(1))
3246 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
3247 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00003248 locations->SetOut(Location::RequiresRegister());
3249 break;
3250 }
3251 default:
3252 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3253 }
3254}
3255
3256void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003257 DataType::Type in_type = compare->InputAt(0)->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00003258
3259 // 0 if: left == right
3260 // 1 if: left > right
3261 // -1 if: left < right
3262 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003263 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003264 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003265 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003266 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003267 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003268 case DataType::Type::kInt32:
3269 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003270 Register result = OutputRegister(compare);
3271 Register left = InputRegisterAt(compare, 0);
3272 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003273 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08003274 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
3275 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00003276 break;
3277 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003278 case DataType::Type::kFloat32:
3279 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003280 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00003281 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003282 __ Cset(result, ne);
3283 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01003284 break;
3285 }
3286 default:
3287 LOG(FATAL) << "Unimplemented compare type " << in_type;
3288 }
3289}
3290
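// Illustrative sketch, not from the upstream sources: the Cset/Cneg pair in the integer
// case above produces the three-way result without branches. The same computation in
// plain C++, with a hypothetical helper name:
static inline int32_t CompareSketch(int64_t left, int64_t right) {
  int32_t result = (left != right) ? 1 : 0;  // Cset(result, ne)
  if (left < right) {
    result = -result;                        // Cneg(result, result, lt)
  }
  return result;                             // -1, 0 or +1
}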
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003291void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003292 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00003293
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003294 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003295 locations->SetInAt(0, Location::RequiresFpuRegister());
3296 locations->SetInAt(1,
3297 IsFloatingPointZeroConstant(instruction->InputAt(1))
3298 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
3299 : Location::RequiresFpuRegister());
3300 } else {
3301 // Integer cases.
3302 locations->SetInAt(0, Location::RequiresRegister());
3303 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
3304 }
3305
David Brazdilb3e773e2016-01-26 11:28:37 +00003306 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00003307 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01003308 }
3309}
3310
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003311void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003312 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003313 return;
3314 }
3315
3316 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01003317 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00003318 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01003319
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003320 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00003321 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003322 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00003323 } else {
3324 // Integer cases.
3325 Register lhs = InputRegisterAt(instruction, 0);
3326 Operand rhs = InputOperandAt(instruction, 1);
3327 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003328 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00003329 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003330}
3331
3332#define FOR_EACH_CONDITION_INSTRUCTION(M) \
3333 M(Equal) \
3334 M(NotEqual) \
3335 M(LessThan) \
3336 M(LessThanOrEqual) \
3337 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07003338 M(GreaterThanOrEqual) \
3339 M(Below) \
3340 M(BelowOrEqual) \
3341 M(Above) \
3342 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01003343#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003344void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
3345void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01003346FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00003347#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01003348#undef FOR_EACH_CONDITION_INSTRUCTION
3349
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003350void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003351 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003352 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003353 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
3354
3355 Register out = OutputRegister(instruction);
3356 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01003357
3358 if (abs_imm == 2) {
3359 int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
3360 __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
3361 } else {
3362 UseScratchRegisterScope temps(GetVIXLAssembler());
3363 Register temp = temps.AcquireSameSizeAs(out);
3364 __ Add(temp, dividend, abs_imm - 1);
3365 __ Cmp(dividend, 0);
3366 __ Csel(out, temp, dividend, lt);
3367 }
3368
Zheng Xuc6667102015-05-15 16:08:45 +08003369 int ctz_imm = CTZ(abs_imm);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003370 if (imm > 0) {
3371 __ Asr(out, out, ctz_imm);
Zheng Xuc6667102015-05-15 16:08:45 +08003372 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003373 __ Neg(out, Operand(out, ASR, ctz_imm));
Zheng Xuc6667102015-05-15 16:08:45 +08003374 }
3375}
3376
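// Illustrative sketch, not from the upstream sources: the add/csel/asr sequence above
// implements round-toward-zero signed division by +/-2^k. An arithmetic shift alone
// rounds toward negative infinity, so (2^k - 1) is added first for negative dividends
// (for k == 1 the bias is just the sign bit, hence the LSR #(bits - 1) form). A plain
// C++ equivalent for the 32-bit case, assuming two's complement arithmetic; the helper
// name is hypothetical.
static inline int32_t DivByPowerOfTwoSketch(int32_t dividend, int ctz_imm, bool negative_divisor) {
  int32_t bias = (dividend < 0) ? static_cast<int32_t>((1u << ctz_imm) - 1u) : 0;  // Add + Csel(lt)
  int32_t quotient = (dividend + bias) >> ctz_imm;                                 // Asr
  return negative_divisor ? -quotient : quotient;                                  // Neg for imm < 0
}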
3377void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3378 DCHECK(instruction->IsDiv() || instruction->IsRem());
3379
3380 LocationSummary* locations = instruction->GetLocations();
3381 Location second = locations->InAt(1);
3382 DCHECK(second.IsConstant());
3383
3384 Register out = OutputRegister(instruction);
3385 Register dividend = InputRegisterAt(instruction, 0);
3386 int64_t imm = Int64FromConstant(second.GetConstant());
3387
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003388 DataType::Type type = instruction->GetResultType();
3389 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Zheng Xuc6667102015-05-15 16:08:45 +08003390
3391 int64_t magic;
3392 int shift;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003393 CalculateMagicAndShiftForDivRem(
3394 imm, type == DataType::Type::kInt64 /* is_long */, &magic, &shift);
Zheng Xuc6667102015-05-15 16:08:45 +08003395
3396 UseScratchRegisterScope temps(GetVIXLAssembler());
3397 Register temp = temps.AcquireSameSizeAs(out);
3398
3399 // temp = get_high(dividend * magic)
3400 __ Mov(temp, magic);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003401 if (type == DataType::Type::kInt64) {
Zheng Xuc6667102015-05-15 16:08:45 +08003402 __ Smulh(temp, dividend, temp);
3403 } else {
3404 __ Smull(temp.X(), dividend, temp);
3405 __ Lsr(temp.X(), temp.X(), 32);
3406 }
3407
3408 if (imm > 0 && magic < 0) {
3409 __ Add(temp, temp, dividend);
3410 } else if (imm < 0 && magic > 0) {
3411 __ Sub(temp, temp, dividend);
3412 }
3413
3414 if (shift != 0) {
3415 __ Asr(temp, temp, shift);
3416 }
3417
3418 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003419 __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003420 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003421 __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003422 // TODO: Strength reduction for msub.
3423 Register temp_imm = temps.AcquireSameSizeAs(out);
3424 __ Mov(temp_imm, imm);
3425 __ Msub(out, temp, temp_imm, dividend);
3426 }
3427}
3428
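// Illustrative sketch, not from the upstream sources: the multiply-high sequence above is
// the classic signed "magic number" division (Granlund-Montgomery, Hacker's Delight). For
// the 32-bit case it corresponds roughly to the following C++, where `magic` and `shift`
// are the values produced by CalculateMagicAndShiftForDivRem(); the helper name is
// hypothetical and two's complement arithmetic is assumed. The remainder variant then
// computes dividend - quotient * imm, which is the Msub above.
static inline int32_t DivByConstantSketch(int32_t dividend, int32_t magic, int shift, int32_t imm) {
  int64_t product = static_cast<int64_t>(dividend) * magic;  // Smull
  int32_t temp = static_cast<int32_t>(product >> 32);        // Keep the high half (Lsr #32).
  if (imm > 0 && magic < 0) {
    temp += dividend;                                        // Add
  } else if (imm < 0 && magic > 0) {
    temp -= dividend;                                        // Sub
  }
  temp >>= shift;                                            // Asr
  return temp - (temp >> 31);                                // Add 1 when temp is negative.
}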
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003429void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003430 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Zheng Xuc6667102015-05-15 16:08:45 +08003431
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003432 if (imm == 0) {
3433 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3434 return;
3435 }
Zheng Xuc6667102015-05-15 16:08:45 +08003436
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003437 if (IsPowerOfTwo(AbsOrMin(imm))) {
3438 GenerateIntDivForPower2Denom(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003439 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003440 // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
3441 DCHECK(imm < -2 || imm > 2) << imm;
3442 GenerateDivRemWithAnyConstant(instruction);
3443 }
3444}
3445
3446void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv *instruction) {
3447 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
3448 << instruction->GetResultType();
3449
3450 if (instruction->GetLocations()->InAt(1).IsConstant()) {
3451 GenerateIntDivForConstDenom(instruction);
3452 } else {
3453 Register out = OutputRegister(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003454 Register dividend = InputRegisterAt(instruction, 0);
3455 Register divisor = InputRegisterAt(instruction, 1);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003456 __ Sdiv(out, dividend, divisor);
Zheng Xuc6667102015-05-15 16:08:45 +08003457 }
3458}
3459
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003460void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3461 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003462 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003463 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003464 case DataType::Type::kInt32:
3465 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003466 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003467 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003468 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3469 break;
3470
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003471 case DataType::Type::kFloat32:
3472 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003473 locations->SetInAt(0, Location::RequiresFpuRegister());
3474 locations->SetInAt(1, Location::RequiresFpuRegister());
3475 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3476 break;
3477
3478 default:
3479 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3480 }
3481}
3482
3483void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003484 DataType::Type type = div->GetResultType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003485 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003486 case DataType::Type::kInt32:
3487 case DataType::Type::kInt64:
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003488 GenerateIntDiv(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003489 break;
3490
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003491 case DataType::Type::kFloat32:
3492 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003493 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3494 break;
3495
3496 default:
3497 LOG(FATAL) << "Unexpected div type " << type;
3498 }
3499}
3500
Alexandre Rames67555f72014-11-18 10:55:16 +00003501void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003502 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003503 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003504}
3505
3506void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3507 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003508 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003509 codegen_->AddSlowPath(slow_path);
3510 Location value = instruction->GetLocations()->InAt(0);
3511
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003512 DataType::Type type = instruction->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +00003513
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003514 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003515 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003516 return;
3517 }
3518
Alexandre Rames67555f72014-11-18 10:55:16 +00003519 if (value.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003520 int64_t divisor = Int64FromLocation(value);
Alexandre Rames67555f72014-11-18 10:55:16 +00003521 if (divisor == 0) {
3522 __ B(slow_path->GetEntryLabel());
3523 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003524      // A division by a non-zero constant is valid. We don't need to perform
3525 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003526 }
3527 } else {
3528 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3529 }
3530}
3531
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003532void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3533 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003534 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003535 locations->SetOut(Location::ConstantLocation(constant));
3536}
3537
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003538void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3539 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003540 // Will be generated at use site.
3541}
3542
Alexandre Rames5319def2014-10-23 10:03:10 +01003543void LocationsBuilderARM64::VisitExit(HExit* exit) {
3544 exit->SetLocations(nullptr);
3545}
3546
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003547void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003548}
3549
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003550void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3551 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003552 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003553 locations->SetOut(Location::ConstantLocation(constant));
3554}
3555
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003556void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003557 // Will be generated at use site.
3558}
3559
David Brazdilfc6a86a2015-06-26 10:33:45 +00003560void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003561 if (successor->IsExitBlock()) {
3562 DCHECK(got->GetPrevious()->AlwaysThrows());
3563 return; // no code needed
3564 }
3565
Serban Constantinescu02164b32014-11-13 14:05:07 +00003566 HBasicBlock* block = got->GetBlock();
3567 HInstruction* previous = got->GetPrevious();
3568 HLoopInformation* info = block->GetLoopInformation();
3569
David Brazdil46e2a392015-03-16 17:31:52 +00003570 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00003571 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
3572 UseScratchRegisterScope temps(GetVIXLAssembler());
3573 Register temp1 = temps.AcquireX();
3574 Register temp2 = temps.AcquireX();
3575 __ Ldr(temp1, MemOperand(sp, 0));
3576 __ Ldrh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3577 __ Add(temp2, temp2, 1);
3578 __ Strh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3579 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003580 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3581 return;
3582 }
3583 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3584 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01003585 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003586 }
3587 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003588 __ B(codegen_->GetLabelOf(successor));
3589 }
3590}
3591
David Brazdilfc6a86a2015-06-26 10:33:45 +00003592void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3593 got->SetLocations(nullptr);
3594}
3595
3596void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3597 HandleGoto(got, got->GetSuccessor());
3598}
3599
3600void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3601 try_boundary->SetLocations(nullptr);
3602}
3603
3604void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3605 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3606 if (!successor->IsExitBlock()) {
3607 HandleGoto(try_boundary, successor);
3608 }
3609}
3610
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003611void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003612 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003613 vixl::aarch64::Label* true_target,
3614 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003615 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003616
David Brazdil0debae72015-11-12 18:37:00 +00003617 if (true_target == nullptr && false_target == nullptr) {
3618 // Nothing to do. The code always falls through.
3619 return;
3620 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003621 // Constant condition, statically compared against "true" (integer value 1).
3622 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003623 if (true_target != nullptr) {
3624 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003625 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003626 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003627 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003628 if (false_target != nullptr) {
3629 __ B(false_target);
3630 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003631 }
David Brazdil0debae72015-11-12 18:37:00 +00003632 return;
3633 }
3634
3635 // The following code generates these patterns:
3636 // (1) true_target == nullptr && false_target != nullptr
3637 // - opposite condition true => branch to false_target
3638 // (2) true_target != nullptr && false_target == nullptr
3639 // - condition true => branch to true_target
3640 // (3) true_target != nullptr && false_target != nullptr
3641 // - condition true => branch to true_target
3642 // - branch to false_target
3643 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003644 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003645 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003646 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003647 if (true_target == nullptr) {
3648 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3649 } else {
3650 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3651 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003652 } else {
3653 // The condition instruction has not been materialized, use its inputs as
3654 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003655 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003656
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003657 DataType::Type type = condition->InputAt(0)->GetType();
3658 if (DataType::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003659 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003660 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003661 IfCondition opposite_condition = condition->GetOppositeCondition();
3662 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003663 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003664 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003665 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003666 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003667 // Integer cases.
3668 Register lhs = InputRegisterAt(condition, 0);
3669 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003670
3671 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003672 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003673 if (true_target == nullptr) {
3674 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3675 non_fallthrough_target = false_target;
3676 } else {
3677 arm64_cond = ARM64Condition(condition->GetCondition());
3678 non_fallthrough_target = true_target;
3679 }
3680
Aart Bik086d27e2016-01-20 17:02:00 -08003681 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003682 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003683 switch (arm64_cond) {
3684 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003685 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003686 break;
3687 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003688 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003689 break;
3690 case lt:
3691 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003692 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003693 break;
3694 case ge:
3695 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003696 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003697 break;
3698 default:
3699 // Without the `static_cast` the compiler throws an error for
3700 // `-Werror=sign-promo`.
3701 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3702 }
3703 } else {
3704 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003705 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003706 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003707 }
3708 }
David Brazdil0debae72015-11-12 18:37:00 +00003709
3710 // If neither branch falls through (case 3), the conditional branch to `true_target`
3711 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3712 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003713 __ B(false_target);
3714 }
3715}
3716
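// Illustrative note, not from the upstream sources: the zero-comparison shortcuts above
// rely on two's complement identities. Equality against 0 maps to Cbz/Cbnz, and for the
// lt/ge cases only the sign bit matters, hence the Tbnz/Tbz on bit (width - 1):
static inline bool IsNegativeSketch(int64_t value) {
  return (static_cast<uint64_t>(value) >> 63) != 0;  // Test only the sign bit.
}
// So `value < 0` branches via Tbnz on the sign bit and `value >= 0` via Tbz, with no
// separate Cmp needed. The helper name is hypothetical.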
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003717void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003718 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003719 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003720 locations->SetInAt(0, Location::RequiresRegister());
3721 }
3722}
3723
3724void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003725 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3726 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003727 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3728 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3729 true_target = nullptr;
3730 }
3731 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3732 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3733 false_target = nullptr;
3734 }
David Brazdil0debae72015-11-12 18:37:00 +00003735 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003736}
3737
3738void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003739 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003740 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01003741 InvokeRuntimeCallingConvention calling_convention;
3742 RegisterSet caller_saves = RegisterSet::Empty();
3743 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3744 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00003745 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003746 locations->SetInAt(0, Location::RequiresRegister());
3747 }
3748}
3749
3750void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003751 SlowPathCodeARM64* slow_path =
3752 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003753 GenerateTestAndBranch(deoptimize,
3754 /* condition_input_index */ 0,
3755 slow_path->GetEntryLabel(),
3756 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003757}
3758
Mingyao Yang063fc772016-08-02 11:02:54 -07003759void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003760 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07003761 LocationSummary(flag, LocationSummary::kNoCall);
3762 locations->SetOut(Location::RequiresRegister());
3763}
3764
3765void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3766 __ Ldr(OutputRegister(flag),
3767 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3768}
3769
David Brazdilc0b601b2016-02-08 14:20:45 +00003770static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3771 return condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003772 DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
David Brazdilc0b601b2016-02-08 14:20:45 +00003773}
3774
Alexandre Rames880f1192016-06-13 16:04:50 +01003775static inline Condition GetConditionForSelect(HCondition* condition) {
3776 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003777 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3778 : ARM64Condition(cond);
3779}
3780
David Brazdil74eb1b22015-12-14 11:44:01 +00003781void LocationsBuilderARM64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003782 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003783 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003784 locations->SetInAt(0, Location::RequiresFpuRegister());
3785 locations->SetInAt(1, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003786 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames880f1192016-06-13 16:04:50 +01003787 } else {
3788 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3789 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3790 bool is_true_value_constant = cst_true_value != nullptr;
3791 bool is_false_value_constant = cst_false_value != nullptr;
3792 // Ask VIXL whether we should synthesize constants in registers.
3793 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3794 Operand true_op = is_true_value_constant ?
3795 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3796 Operand false_op = is_false_value_constant ?
3797 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3798 bool true_value_in_register = false;
3799 bool false_value_in_register = false;
3800 MacroAssembler::GetCselSynthesisInformation(
3801 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3802 true_value_in_register |= !is_true_value_constant;
3803 false_value_in_register |= !is_false_value_constant;
3804
3805 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3806 : Location::ConstantLocation(cst_true_value));
3807 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3808 : Location::ConstantLocation(cst_false_value));
Donghui Bai426b49c2016-11-08 14:55:38 +08003809 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003810 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003811
David Brazdil74eb1b22015-12-14 11:44:01 +00003812 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3813 locations->SetInAt(2, Location::RequiresRegister());
3814 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003815}
3816
3817void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003818 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003819 Condition csel_cond;
3820
3821 if (IsBooleanValueOrMaterializedCondition(cond)) {
3822 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003823 // Use the condition flags set by the previous instruction.
3824 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003825 } else {
3826 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003827 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003828 }
3829 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003830 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003831 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003832 } else {
3833 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003834 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003835 }
3836
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003837 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003838 __ Fcsel(OutputFPRegister(select),
3839 InputFPRegisterAt(select, 1),
3840 InputFPRegisterAt(select, 0),
3841 csel_cond);
3842 } else {
3843 __ Csel(OutputRegister(select),
3844 InputOperandAt(select, 1),
3845 InputOperandAt(select, 0),
3846 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003847 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003848}
3849
David Srbecky0cf44932015-12-09 14:09:59 +00003850void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003851 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00003852}
3853
David Srbeckyd28f4a02016-03-14 17:14:24 +00003854void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3855 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003856}
3857
3858void CodeGeneratorARM64::GenerateNop() {
3859 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003860}
3861
Alexandre Rames5319def2014-10-23 10:03:10 +01003862void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00003863 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003864}
3865
3866void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003867 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003868}
3869
3870void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003871 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003872}
3873
3874void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003875 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003876}
3877
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003878// Temp is used for read barrier.
3879static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3880 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003881 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003882 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3883 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3884 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3885 return 1;
3886 }
3887 return 0;
3888}
3889
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003890// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003891// interface pointer, one for loading the current interface.
3892// The other checks have one temp for loading the object's class.
3893static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3894 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3895 return 3;
3896 }
3897 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003898}
3899
Alexandre Rames67555f72014-11-18 10:55:16 +00003900void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003901 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003902 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003903 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003904 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003905 case TypeCheckKind::kExactCheck:
3906 case TypeCheckKind::kAbstractClassCheck:
3907 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00003908 case TypeCheckKind::kArrayObjectCheck: {
3909 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
3910 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
3911 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003912 break;
Vladimir Marko87584542017-12-12 17:47:52 +00003913 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003914 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003915 case TypeCheckKind::kUnresolvedCheck:
3916 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003917 call_kind = LocationSummary::kCallOnSlowPath;
3918 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00003919 case TypeCheckKind::kBitstringCheck:
3920 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003921 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003922
Vladimir Markoca6fff82017-10-03 14:49:14 +01003923 LocationSummary* locations =
3924 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003925 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003926 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003927 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003928 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003929 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3930 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3931 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3932 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3933 } else {
3934 locations->SetInAt(1, Location::RequiresRegister());
3935 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003936 // The "out" register is used as a temporary, so it overlaps with the inputs.
3937 // Note that TypeCheckSlowPathARM64 uses this register too.
3938 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003939 // Add temps if necessary for read barriers.
3940 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003941}
3942
3943void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003944 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003945 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003946 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003947 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00003948 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
3949 ? Register()
3950 : InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003951 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003952 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003953 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3954 DCHECK_LE(num_temps, 1u);
3955 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003956 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3957 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3958 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3959 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003960
Scott Wakeling97c72b72016-06-24 16:19:36 +01003961 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003962 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003963
3964 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003965 // Avoid null check if we know `obj` is not null.
3966 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003967 __ Cbz(obj, &zero);
3968 }
3969
Roland Levillain44015862016-01-22 11:47:17 +00003970 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003971 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003972 ReadBarrierOption read_barrier_option =
3973 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003974 // /* HeapReference<Class> */ out = obj->klass_
3975 GenerateReferenceLoadTwoRegisters(instruction,
3976 out_loc,
3977 obj_loc,
3978 class_offset,
3979 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003980 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003981 __ Cmp(out, cls);
3982 __ Cset(out, eq);
3983 if (zero.IsLinked()) {
3984 __ B(&done);
3985 }
3986 break;
3987 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003988
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003989 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003990 ReadBarrierOption read_barrier_option =
3991 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003992 // /* HeapReference<Class> */ out = obj->klass_
3993 GenerateReferenceLoadTwoRegisters(instruction,
3994 out_loc,
3995 obj_loc,
3996 class_offset,
3997 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003998 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003999 // If the class is abstract, we eagerly fetch the super class of the
4000 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004001      vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004002 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004003 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004004 GenerateReferenceLoadOneRegister(instruction,
4005 out_loc,
4006 super_offset,
4007 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004008 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004009 // If `out` is null, we use it for the result, and jump to `done`.
4010 __ Cbz(out, &done);
4011 __ Cmp(out, cls);
4012 __ B(ne, &loop);
4013 __ Mov(out, 1);
4014 if (zero.IsLinked()) {
4015 __ B(&done);
4016 }
4017 break;
4018 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004019
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004020 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00004021 ReadBarrierOption read_barrier_option =
4022 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08004023 // /* HeapReference<Class> */ out = obj->klass_
4024 GenerateReferenceLoadTwoRegisters(instruction,
4025 out_loc,
4026 obj_loc,
4027 class_offset,
4028 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004029 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004030 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004031 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004032 __ Bind(&loop);
4033 __ Cmp(out, cls);
4034 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004035 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004036 GenerateReferenceLoadOneRegister(instruction,
4037 out_loc,
4038 super_offset,
4039 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004040 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004041 __ Cbnz(out, &loop);
4042 // If `out` is null, we use it for the result, and jump to `done`.
4043 __ B(&done);
4044 __ Bind(&success);
4045 __ Mov(out, 1);
4046 if (zero.IsLinked()) {
4047 __ B(&done);
4048 }
4049 break;
4050 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004051
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004052 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00004053 ReadBarrierOption read_barrier_option =
4054 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08004055 // /* HeapReference<Class> */ out = obj->klass_
4056 GenerateReferenceLoadTwoRegisters(instruction,
4057 out_loc,
4058 obj_loc,
4059 class_offset,
4060 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004061 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004062 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004063 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004064 __ Cmp(out, cls);
4065 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004066 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004067 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004068 GenerateReferenceLoadOneRegister(instruction,
4069 out_loc,
4070 component_offset,
4071 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00004072 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004073 // If `out` is null, we use it for the result, and jump to `done`.
4074 __ Cbz(out, &done);
4075 __ Ldrh(out, HeapOperand(out, primitive_offset));
4076 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
4077 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004078 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004079 __ Mov(out, 1);
4080 __ B(&done);
4081 break;
4082 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004083
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004084 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08004085 // No read barrier since the slow path will retry upon failure.
4086 // /* HeapReference<Class> */ out = obj->klass_
4087 GenerateReferenceLoadTwoRegisters(instruction,
4088 out_loc,
4089 obj_loc,
4090 class_offset,
4091 maybe_temp_loc,
4092 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004093 __ Cmp(out, cls);
4094 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01004095 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4096 instruction, /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004097 codegen_->AddSlowPath(slow_path);
4098 __ B(ne, slow_path->GetEntryLabel());
4099 __ Mov(out, 1);
4100 if (zero.IsLinked()) {
4101 __ B(&done);
4102 }
4103 break;
4104 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004105
Calin Juravle98893e12015-10-02 21:05:03 +01004106 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004107 case TypeCheckKind::kInterfaceCheck: {
4108 // Note that we indeed only call on slow path, but we always go
4109 // into the slow path for the unresolved and interface check
4110 // cases.
4111 //
4112 // We cannot directly call the InstanceofNonTrivial runtime
4113 // entry point without resorting to a type checking slow path
4114 // here (i.e. by calling InvokeRuntime directly), as it would
4115 // require to assign fixed registers for the inputs of this
4116      // require us to assign fixed registers for the inputs of this
4117 // convention), which might be cluttered by the potential first
4118 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00004119 //
4120 // TODO: Introduce a new runtime entry point taking the object
4121 // to test (instead of its class) as argument, and let it deal
4122 // with the read barrier issues. This will let us refactor this
4123 // case of the `switch` code as it was previously (with a direct
4124 // call to the runtime not using a type checking slow path).
4125 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004126 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01004127 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4128 instruction, /* is_fatal */ false);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004129 codegen_->AddSlowPath(slow_path);
4130 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004131 if (zero.IsLinked()) {
4132 __ B(&done);
4133 }
4134 break;
4135 }
Vladimir Marko175e7862018-03-27 09:03:13 +00004136
4137 case TypeCheckKind::kBitstringCheck: {
4138      // /* HeapReference<Class> */ out = obj->klass_
4139 GenerateReferenceLoadTwoRegisters(instruction,
4140 out_loc,
4141 obj_loc,
4142 class_offset,
4143 maybe_temp_loc,
4144 kWithoutReadBarrier);
4145
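      // GenerateBitstringTypeCheckCompare() compares the type-check bitstring of the class now
      // held in `out` against the constant mask and path-to-root inputs, setting the `eq` flag
      // on a match; the Cset below then materializes the boolean result.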
4146 GenerateBitstringTypeCheckCompare(instruction, out);
4147 __ Cset(out, eq);
4148 if (zero.IsLinked()) {
4149 __ B(&done);
4150 }
4151 break;
4152 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004153 }
4154
4155 if (zero.IsLinked()) {
4156 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01004157 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004158 }
4159
4160 if (done.IsLinked()) {
4161 __ Bind(&done);
4162 }
4163
4164 if (slow_path != nullptr) {
4165 __ Bind(slow_path->GetExitLabel());
4166 }
4167}
4168
4169void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004170 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00004171 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01004172 LocationSummary* locations =
4173 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004174 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00004175 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
4176 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
4177 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
4178 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
4179 } else {
4180 locations->SetInAt(1, Location::RequiresRegister());
4181 }
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004182 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
4183 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004184}
4185
4186void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00004187 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004188 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004189 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004190 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00004191 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
4192 ? Register()
4193 : InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004194 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
4195 DCHECK_GE(num_temps, 1u);
4196 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004197 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004198 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
4199 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004200 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004201 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4202 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4203 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
4204 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
4205 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
4206 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
4207 const uint32_t object_array_data_offset =
4208 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004209
Vladimir Marko87584542017-12-12 17:47:52 +00004210 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004211 SlowPathCodeARM64* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01004212 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4213 instruction, is_type_check_slow_path_fatal);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004214 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004215
Scott Wakeling97c72b72016-06-24 16:19:36 +01004216 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004217 // Avoid null check if we know obj is not null.
4218 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01004219 __ Cbz(obj, &done);
4220 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004221
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004222 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004223 case TypeCheckKind::kExactCheck:
4224 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004225 // /* HeapReference<Class> */ temp = obj->klass_
4226 GenerateReferenceLoadTwoRegisters(instruction,
4227 temp_loc,
4228 obj_loc,
4229 class_offset,
4230 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004231 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004232
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004233 __ Cmp(temp, cls);
4234 // Jump to slow path for throwing the exception or doing a
4235 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004236 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004237 break;
4238 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004239
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004240 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004241 // /* HeapReference<Class> */ temp = obj->klass_
4242 GenerateReferenceLoadTwoRegisters(instruction,
4243 temp_loc,
4244 obj_loc,
4245 class_offset,
4246 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004247 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004248
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004249 // If the class is abstract, we eagerly fetch the super class of the
4250 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004251 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004252 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004253 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004254 GenerateReferenceLoadOneRegister(instruction,
4255 temp_loc,
4256 super_offset,
4257 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004258 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004259
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004260 // If the class reference currently in `temp` is null, jump to the slow path to throw the
4261 // exception.
4262 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4263 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004264 __ Cmp(temp, cls);
4265 __ B(ne, &loop);
4266 break;
4267 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004268
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004269 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004270 // /* HeapReference<Class> */ temp = obj->klass_
4271 GenerateReferenceLoadTwoRegisters(instruction,
4272 temp_loc,
4273 obj_loc,
4274 class_offset,
4275 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004276 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004277
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004278 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004279 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004280 __ Bind(&loop);
4281 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004282 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004283
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004284 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004285 GenerateReferenceLoadOneRegister(instruction,
4286 temp_loc,
4287 super_offset,
4288 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004289 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004290
4291 // If the class reference currently in `temp` is not null, jump
4292      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004293 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004294 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004295 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004296 break;
4297 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004298
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004299 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004300 // /* HeapReference<Class> */ temp = obj->klass_
4301 GenerateReferenceLoadTwoRegisters(instruction,
4302 temp_loc,
4303 obj_loc,
4304 class_offset,
4305 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004306 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004307
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004308 // Do an exact check.
4309 __ Cmp(temp, cls);
4310 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004311
4312 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004313 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004314 GenerateReferenceLoadOneRegister(instruction,
4315 temp_loc,
4316 component_offset,
4317 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004318 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004319
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004320 // If the component type is null, jump to the slow path to throw the exception.
4321 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4322 // Otherwise, the object is indeed an array. Further check that this component type is not a
4323 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004324 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
4325 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004326 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004327 break;
4328 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004329
Calin Juravle98893e12015-10-02 21:05:03 +01004330 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004331 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004332 //
4333 // We cannot directly call the CheckCast runtime entry point
4334 // without resorting to a type checking slow path here (i.e. by
4335      // calling InvokeRuntime directly), as it would require us to
4336      // assign fixed registers for the inputs of this HCheckCast
4337 // instruction (following the runtime calling convention), which
4338 // might be cluttered by the potential first read barrier
4339 // emission at the beginning of this method.
4340 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004341 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004342 case TypeCheckKind::kInterfaceCheck: {
4343 // /* HeapReference<Class> */ temp = obj->klass_
4344 GenerateReferenceLoadTwoRegisters(instruction,
4345 temp_loc,
4346 obj_loc,
4347 class_offset,
4348 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004349 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004350
4351 // /* HeapReference<Class> */ temp = temp->iftable_
4352 GenerateReferenceLoadTwoRegisters(instruction,
4353 temp_loc,
4354 temp_loc,
4355 iftable_offset,
4356 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004357 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08004358 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004359 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08004360 // Loop through the iftable and check if any class matches.
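      // The iftable is an object array laid out as (interface class, method array) pairs, so its
      // length counts two references per implemented interface; the loop below advances `temp`
      // by two references and decrements the remaining count by 2 on each iteration.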
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004361 vixl::aarch64::Label start_loop;
4362 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004363 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004364 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
4365 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004366 // Go to next interface.
4367 __ Add(temp, temp, 2 * kHeapReferenceSize);
4368 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004369 // Compare the classes and continue the loop if they do not match.
4370 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
4371 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004372 break;
4373 }
Vladimir Marko175e7862018-03-27 09:03:13 +00004374
4375 case TypeCheckKind::kBitstringCheck: {
4376 // /* HeapReference<Class> */ temp = obj->klass_
4377 GenerateReferenceLoadTwoRegisters(instruction,
4378 temp_loc,
4379 obj_loc,
4380 class_offset,
4381 maybe_temp2_loc,
4382 kWithoutReadBarrier);
4383
4384 GenerateBitstringTypeCheckCompare(instruction, temp);
4385 __ B(ne, type_check_slow_path->GetEntryLabel());
4386 break;
4387 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004388 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00004389 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004390
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004391 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004392}
4393
Alexandre Rames5319def2014-10-23 10:03:10 +01004394void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004395 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01004396 locations->SetOut(Location::ConstantLocation(constant));
4397}
4398
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004399void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004400 // Will be generated at use site.
4401}
4402
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004403void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004404 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004405 locations->SetOut(Location::ConstantLocation(constant));
4406}
4407
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004408void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004409 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004410}
4411
Calin Juravle175dc732015-08-25 15:42:32 +01004412void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4413 // The trampoline uses the same calling convention as dex calling conventions,
4414 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
4415 // the method_idx.
4416 HandleInvoke(invoke);
4417}
4418
4419void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4420 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004421 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle175dc732015-08-25 15:42:32 +01004422}
4423
Alexandre Rames5319def2014-10-23 10:03:10 +01004424void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01004425 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01004426 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01004427}
4428
Alexandre Rames67555f72014-11-18 10:55:16 +00004429void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4430 HandleInvoke(invoke);
4431}
4432
4433void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4434 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004435 LocationSummary* locations = invoke->GetLocations();
4436 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004437 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00004438 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004439 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00004440
4441 // The register ip1 is required to be used for the hidden argument in
4442 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004443 MacroAssembler* masm = GetVIXLAssembler();
4444 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004445 scratch_scope.Exclude(ip1);
4446 __ Mov(ip1, invoke->GetDexMethodIndex());
4447
Artem Serov914d7a82017-02-07 14:33:49 +00004448 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004449 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004450 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004451 {
4452 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4453 // /* HeapReference<Class> */ temp = temp->klass_
4454 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4455 codegen_->MaybeRecordImplicitNullCheck(invoke);
4456 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004457 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004458 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004459 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004460 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004461 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004462 }
Artem Serov914d7a82017-02-07 14:33:49 +00004463
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004464 // Instead of simply (possibly) unpoisoning `temp` here, we should
4465 // emit a read barrier for the previous class reference load.
4466 // However this is not required in practice, as this is an
4467 // intermediate/temporary reference and because the current
4468 // concurrent copying collector keeps the from-space memory
4469 // intact/accessible until the end of the marking phase (the
4470 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004471 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
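  // temp = temp->imt_ (the class's ImTable pointer).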
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004472 __ Ldr(temp,
4473 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4474 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004475 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004476 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004477 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004478 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004479 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004480
4481 {
4482 // Ensure the pc position is recorded immediately after the `blr` instruction.
4483 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4484
4485 // lr();
4486 __ blr(lr);
4487 DCHECK(!codegen_->IsLeafMethod());
4488 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4489 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004490
4491 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004492}
4493
4494void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004495 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004496 if (intrinsic.TryDispatch(invoke)) {
4497 return;
4498 }
4499
Alexandre Rames67555f72014-11-18 10:55:16 +00004500 HandleInvoke(invoke);
4501}
4502
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004503void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004504 // Explicit clinit checks triggered by static invokes must have been pruned by
4505 // art::PrepareForRegisterAllocation.
4506 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004507
Vladimir Markoca6fff82017-10-03 14:49:14 +01004508 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004509 if (intrinsic.TryDispatch(invoke)) {
4510 return;
4511 }
4512
Alexandre Rames67555f72014-11-18 10:55:16 +00004513 HandleInvoke(invoke);
4514}
4515
Andreas Gampe878d58c2015-01-15 23:24:00 -08004516static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4517 if (invoke->GetLocations()->Intrinsified()) {
4518 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4519 intrinsic.Dispatch(invoke);
4520 return true;
4521 }
4522 return false;
4523}
4524
Vladimir Markodc151b22015-10-15 18:02:30 +01004525HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4526 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004527 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004528 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004529 return desired_dispatch_info;
4530}
4531
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004532void CodeGeneratorARM64::GenerateStaticOrDirectCall(
4533 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004534 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004535 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4536 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004537 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4538 uint32_t offset =
4539 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004540 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004541 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004542 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004543 }
Vladimir Marko58155012015-08-19 12:49:41 +00004544 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004545 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004546 break;
Vladimir Marko65979462017-05-19 17:25:12 +01004547 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
4548 DCHECK(GetCompilerOptions().IsBootImage());
4549 // Add ADRP with its PC-relative method patch.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004550 vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
Vladimir Marko65979462017-05-19 17:25:12 +01004551 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4552 // Add ADD with its PC-relative method patch.
4553 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004554 NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
Vladimir Marko65979462017-05-19 17:25:12 +01004555 EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
4556 break;
4557 }
Vladimir Marko58155012015-08-19 12:49:41 +00004558 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4559 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00004560 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00004561 break;
Vladimir Markob066d432018-01-03 13:14:37 +00004562 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
4563 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004564 uint32_t boot_image_offset = GetBootImageOffset(invoke);
Vladimir Markob066d432018-01-03 13:14:37 +00004565 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
4566 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4567 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
4568 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
4569 // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
4570 EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
4571 break;
4572 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004573 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Vladimir Markob066d432018-01-03 13:14:37 +00004574 // Add ADRP with its PC-relative .bss entry patch.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004575 MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
4576 vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004577 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Markob066d432018-01-03 13:14:37 +00004578 // Add LDR with its PC-relative .bss entry patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004579 vixl::aarch64::Label* ldr_label =
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004580 NewMethodBssEntryPatch(target_method, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004581 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004582 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004583 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004584 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
4585 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
4586 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko58155012015-08-19 12:49:41 +00004587 }
4588 }
4589
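  // The callee ArtMethod* (when needed) is now in `callee_method`; either branch directly to
  // this method's own frame entry (recursive call) or load the entry point from the ArtMethod
  // and branch through lr.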
4590 switch (invoke->GetCodePtrLocation()) {
4591 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004592 {
4593 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
4594 ExactAssemblyScope eas(GetVIXLAssembler(),
4595 kInstructionSize,
4596 CodeBufferCheckScope::kExactSize);
4597 __ bl(&frame_entry_label_);
4598 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
4599 }
Vladimir Marko58155012015-08-19 12:49:41 +00004600 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004601 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4602 // LR = callee_method->entry_point_from_quick_compiled_code_;
4603 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004604 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004605 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004606 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004607 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004608 ExactAssemblyScope eas(GetVIXLAssembler(),
4609 kInstructionSize,
4610 CodeBufferCheckScope::kExactSize);
4611 // lr()
4612 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004613 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004614 }
Vladimir Marko58155012015-08-19 12:49:41 +00004615 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004616 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004617
Andreas Gampe878d58c2015-01-15 23:24:00 -08004618 DCHECK(!IsLeafMethod());
4619}
4620
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004621void CodeGeneratorARM64::GenerateVirtualCall(
4622 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004623 // Use the calling convention instead of the location of the receiver, as
4624 // intrinsics may have put the receiver in a different register. In the intrinsics
4625 // slow path, the arguments have been moved to the right place, so here we are
4626 // guaranteed that the receiver is the first register of the calling convention.
4627 InvokeDexCallingConvention calling_convention;
4628 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004629 Register temp = XRegisterFrom(temp_in);
4630 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4631 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4632 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004633 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004634
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004635 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004636
4637 {
4638 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
4639 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4640 // /* HeapReference<Class> */ temp = receiver->klass_
4641 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4642 MaybeRecordImplicitNullCheck(invoke);
4643 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004644 // Instead of simply (possibly) unpoisoning `temp` here, we should
4645 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004646  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
4647 // concurrent copying collector keeps the from-space memory
4648 // intact/accessible until the end of the marking phase (the
4649 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004650 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4651 // temp = temp->GetMethodAt(method_offset);
4652 __ Ldr(temp, MemOperand(temp, method_offset));
4653 // lr = temp->GetEntryPoint();
4654 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004655 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004656 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004657 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4658 // lr();
4659 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004660 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004661 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004662}
4663
Orion Hodsonac141392017-01-13 11:53:47 +00004664void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4665 HandleInvoke(invoke);
4666}
4667
4668void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4669 codegen_->GenerateInvokePolymorphicCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004670 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Orion Hodsonac141392017-01-13 11:53:47 +00004671}
4672
Orion Hodson4c8e12e2018-05-18 08:33:20 +01004673void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4674 HandleInvoke(invoke);
4675}
4676
4677void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4678 codegen_->GenerateInvokeCustomCall(invoke);
4679 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
4680}
4681
Vladimir Marko6fd16062018-06-26 11:02:04 +01004682vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
4683 uint32_t intrinsic_data,
4684 vixl::aarch64::Label* adrp_label) {
4685 return NewPcRelativePatch(
4686 /* dex_file */ nullptr, intrinsic_data, adrp_label, &boot_image_intrinsic_patches_);
4687}
4688
Vladimir Markob066d432018-01-03 13:14:37 +00004689vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
4690 uint32_t boot_image_offset,
4691 vixl::aarch64::Label* adrp_label) {
4692 return NewPcRelativePatch(
4693 /* dex_file */ nullptr, boot_image_offset, adrp_label, &boot_image_method_patches_);
4694}
4695
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004696vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004697 MethodReference target_method,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004698 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004699 return NewPcRelativePatch(
4700 target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004701}
4702
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004703vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
4704 MethodReference target_method,
4705 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004706 return NewPcRelativePatch(
4707 target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004708}
4709
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004710vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
Scott Wakeling97c72b72016-06-24 16:19:36 +01004711 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004712 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004713 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004714 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004715}
4716
Vladimir Marko1998cd02017-01-13 13:02:58 +00004717vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4718 const DexFile& dex_file,
4719 dex::TypeIndex type_index,
4720 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004721 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004722}
4723
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004724vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004725 const DexFile& dex_file,
4726 dex::StringIndex string_index,
4727 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004728 return NewPcRelativePatch(
4729 &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01004730}
4731
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004732vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
4733 const DexFile& dex_file,
4734 dex::StringIndex string_index,
4735 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004736 return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004737}
4738
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004739vixl::aarch64::Label* CodeGeneratorARM64::NewBakerReadBarrierPatch(uint32_t custom_data) {
4740 baker_read_barrier_patches_.emplace_back(custom_data);
4741 return &baker_read_barrier_patches_.back().label;
4742}
4743
Scott Wakeling97c72b72016-06-24 16:19:36 +01004744vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004745 const DexFile* dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004746 uint32_t offset_or_index,
4747 vixl::aarch64::Label* adrp_label,
4748 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004749 // Add a patch entry and return the label.
4750 patches->emplace_back(dex_file, offset_or_index);
4751 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004752 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004753 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4754 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4755 return label;
4756}
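// Note: the PcRelativePatchInfo entries created here are converted into linker::LinkerPatch
// objects by EmitLinkerPatches() below; `pc_insn_label` ties an ADD/LDR placeholder back to the
// ADRP instruction it depends on.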
4757
Scott Wakeling97c72b72016-06-24 16:19:36 +01004758vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4759 uint64_t address) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004760 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004761}
4762
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004763vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004764 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004765 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004766 return jit_string_patches_.GetOrCreate(
4767 StringReference(&dex_file, string_index),
4768 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4769}
4770
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004771vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004772 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004773 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004774 return jit_class_patches_.GetOrCreate(
4775 TypeReference(&dex_file, type_index),
4776 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4777}
4778
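// The helpers below emit placeholder ADRP / ADD / LDR instructions bound to patch labels: the
// ADRP materializes the 4KiB-page-aligned part of a PC-relative address and the following ADD
// or LDR supplies the low 12 bits. Both immediates are filled in when the corresponding linker
// patch is applied.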
Vladimir Markoaad75c62016-10-03 08:46:48 +00004779void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4780 vixl::aarch64::Register reg) {
4781 DCHECK(reg.IsX());
4782 SingleEmissionCheckScope guard(GetVIXLAssembler());
4783 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004784 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004785}
4786
4787void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4788 vixl::aarch64::Register out,
4789 vixl::aarch64::Register base) {
4790 DCHECK(out.IsX());
4791 DCHECK(base.IsX());
4792 SingleEmissionCheckScope guard(GetVIXLAssembler());
4793 __ Bind(fixup_label);
4794 __ add(out, base, Operand(/* offset placeholder */ 0));
4795}
4796
4797void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4798 vixl::aarch64::Register out,
4799 vixl::aarch64::Register base) {
4800 DCHECK(base.IsX());
4801 SingleEmissionCheckScope guard(GetVIXLAssembler());
4802 __ Bind(fixup_label);
4803 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4804}
4805
Vladimir Markoeebb8212018-06-05 14:57:24 +01004806void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01004807 uint32_t boot_image_reference) {
4808 if (GetCompilerOptions().IsBootImage()) {
4809    // Add ADRP with its PC-relative intrinsic patch.
4810 vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
4811 EmitAdrpPlaceholder(adrp_label, reg.X());
4812    // Add ADD with its PC-relative intrinsic patch.
4813 vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
4814 EmitAddPlaceholder(add_label, reg.X(), reg.X());
4815 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01004816 DCHECK(Runtime::Current()->IsAotCompiler());
4817 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004818 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004819 EmitAdrpPlaceholder(adrp_label, reg.X());
4820 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004821 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004822 EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
4823 } else {
4824 gc::Heap* heap = Runtime::Current()->GetHeap();
4825 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004826 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01004827 __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
4828 }
4829}
4830
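// Helper for intrinsics that allocate a boxed instance (e.g. the Integer.valueOf() intrinsic):
// it loads the target class into the first runtime-call argument register and invokes the
// "allocate object initialized" runtime entry point.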
Vladimir Marko6fd16062018-06-26 11:02:04 +01004831void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
4832 uint32_t boot_image_offset) {
4833 DCHECK(invoke->IsStatic());
4834 InvokeRuntimeCallingConvention calling_convention;
4835 Register argument = calling_convention.GetRegisterAt(0);
4836 if (GetCompilerOptions().IsBootImage()) {
4837 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
4838 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
4839 MethodReference target_method = invoke->GetTargetMethod();
4840 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
4841 // Add ADRP with its PC-relative type patch.
4842 vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
4843 EmitAdrpPlaceholder(adrp_label, argument.X());
4844 // Add ADD with its PC-relative type patch.
4845 vixl::aarch64::Label* add_label =
4846 NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
4847 EmitAddPlaceholder(add_label, argument.X(), argument.X());
4848 } else {
4849 LoadBootImageAddress(argument, boot_image_offset);
4850 }
4851 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
4852 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
4853}
4854
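// Converts the recorded PcRelativePatchInfo entries into linker::LinkerPatch
// objects: Factory is one of the LinkerPatch::*Patch constructors and receives
// the literal offset of the patched instruction, the target dex file, the
// location of the dependent ADRP, and the target offset or index.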
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004855template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00004856inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4857 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004858 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004859 for (const PcRelativePatchInfo& info : infos) {
4860 linker_patches->push_back(Factory(info.label.GetLocation(),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004861 info.target_dex_file,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004862 info.pc_insn_label->GetLocation(),
4863 info.offset_or_index));
4864 }
4865}
4866
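// Adapts LinkerPatch factories that take no dex file (IntrinsicReferencePatch,
// DataBimgRelRoPatch) to the four-argument signature expected by
// EmitPcRelativeLinkerPatches above; the dex file must be null for such patches.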
Vladimir Marko6fd16062018-06-26 11:02:04 +01004867template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
4868linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
4869 const DexFile* target_dex_file,
4870 uint32_t pc_insn_offset,
4871 uint32_t boot_image_offset) {
4872 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
4873 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00004874}
4875
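// Collects every patch recorded during code generation into `linker_patches`.
// Note that when not compiling the boot image, boot_image_method_patches_ is
// reused to hold .data.bimg.rel.ro patches and the remaining boot image patch
// lists must be empty (see the DCHECKs in the else branch).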
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004876void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00004877 DCHECK(linker_patches->empty());
4878 size_t size =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004879 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004880 method_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004881 boot_image_type_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004882 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004883 boot_image_string_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004884 string_bss_entry_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01004885 boot_image_intrinsic_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004886 baker_read_barrier_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004887 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01004888 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004889 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004890 boot_image_method_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004891 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004892 boot_image_type_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004893 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004894 boot_image_string_patches_, linker_patches);
Vladimir Marko6fd16062018-06-26 11:02:04 +01004895 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
4896 boot_image_intrinsic_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01004897 } else {
Vladimir Marko6fd16062018-06-26 11:02:04 +01004898 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
Vladimir Markob066d432018-01-03 13:14:37 +00004899 boot_image_method_patches_, linker_patches);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004900 DCHECK(boot_image_type_patches_.empty());
4901 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004902 DCHECK(boot_image_intrinsic_patches_.empty());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004903 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004904 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
4905 method_bss_entry_patches_, linker_patches);
4906 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
4907 type_bss_entry_patches_, linker_patches);
4908 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
4909 string_bss_entry_patches_, linker_patches);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004910 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004911 linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
4912 info.label.GetLocation(), info.custom_data));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004913 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004914 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004915}
4916
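// Thunk support for the relative patcher: method call thunks are used when a
// relative call cannot reach its target directly and simply jump through the
// ArtMethod's quick entry point, while Baker read barrier thunks hold the
// out-of-line read barrier code identified by the patch's custom data.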
Vladimir Markoca1e0382018-04-11 09:58:41 +00004917bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
4918 return patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
4919 patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
4920}
4921
4922void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
4923 /*out*/ ArenaVector<uint8_t>* code,
4924 /*out*/ std::string* debug_name) {
4925 Arm64Assembler assembler(GetGraph()->GetAllocator());
4926 switch (patch.GetType()) {
4927 case linker::LinkerPatch::Type::kCallRelative: {
4928 // The thunk just uses the entry point in the ArtMethod. This works even for calls
4929 // to the generic JNI and interpreter trampolines.
4930 Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
4931 kArm64PointerSize).Int32Value());
4932 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
4933 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
4934 *debug_name = "MethodCallThunk";
4935 }
4936 break;
4937 }
4938 case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
4939 DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
4940 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
4941 break;
4942 }
4943 default:
4944 LOG(FATAL) << "Unexpected patch type " << patch.GetType();
4945 UNREACHABLE();
4946 }
4947
4948 // Ensure we emit the literal pool if any.
4949 assembler.FinalizeCode();
4950 code->resize(assembler.CodeSize());
4951 MemoryRegion code_region(code->data(), code->size());
4952 assembler.FinalizeInstructions(code_region);
4953}
4954
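// Literal pool deduplication: the uint32_literals_ / uint64_literals_ maps keep
// a single pool entry per distinct value, so repeated uses of the same constant
// or address share one literal.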
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004955vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
4956 return uint32_literals_.GetOrCreate(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004957 value,
4958 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4959}
4960
Scott Wakeling97c72b72016-06-24 16:19:36 +01004961vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004962 return uint64_literals_.GetOrCreate(
4963 value,
4964 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004965}
4966
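// Invoke code generation first tries to replace the call with an intrinsic
// implementation; otherwise the call is emitted inside an EmissionCheckScope so
// that no literal pool is dumped between the BLR and RecordPcInfo, keeping the
// recorded PC attached to the call instruction.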
Andreas Gampe878d58c2015-01-15 23:24:00 -08004967void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004968 // Explicit clinit checks triggered by static invokes must have been pruned by
4969 // art::PrepareForRegisterAllocation.
4970 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004971
Andreas Gampe878d58c2015-01-15 23:24:00 -08004972 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004973 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004974 return;
4975 }
4976
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004977 {
4978 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4979 // are no pools emitted.
4980 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4981 LocationSummary* locations = invoke->GetLocations();
4982 codegen_->GenerateStaticOrDirectCall(
4983 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
4984 }
4985
4986 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004987}
4988
4989void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004990 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004991 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004992 return;
4993 }
4994
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004995 {
4996 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
4997 // are no pools emitted.
4998 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4999 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
5000 DCHECK(!codegen_->IsLeafMethod());
5001 }
5002
5003 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005004}
5005
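// Validates the class load kind chosen by sharpening against the compilation
// mode: the PC-relative kinds (boot image link-time, .data.bimg.rel.ro, .bss
// entry) require AOT compilation, kJitTableAddress requires the JIT, and the
// remaining kinds are always supported. ARM64 does not need to downgrade any
// of them.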
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005006HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
5007 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005008 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005009 case HLoadClass::LoadKind::kInvalid:
5010 LOG(FATAL) << "UNREACHABLE";
5011 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005012 case HLoadClass::LoadKind::kReferrersClass:
5013 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005014 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005015 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005016 case HLoadClass::LoadKind::kBssEntry:
5017 DCHECK(!Runtime::Current()->UseJitCompilation());
5018 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005019 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005020 DCHECK(Runtime::Current()->UseJitCompilation());
5021 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005022 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005023 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005024 break;
5025 }
5026 return desired_class_load_kind;
5027}
5028
Alexandre Rames67555f72014-11-18 10:55:16 +00005029void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00005030 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005031 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005032 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00005033 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005034 cls,
5035 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00005036 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00005037 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005038 return;
5039 }
Vladimir Marko41559982017-01-06 14:04:23 +00005040 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005041
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005042 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
5043 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005044 ? LocationSummary::kCallOnSlowPath
5045 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005046 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005047 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005048 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005049 }
5050
Vladimir Marko41559982017-01-06 14:04:23 +00005051 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005052 locations->SetInAt(0, Location::RequiresRegister());
5053 }
5054 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00005055 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
5056 if (!kUseReadBarrier || kUseBakerReadBarrier) {
5057 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Markoea4c1262017-02-06 19:59:33 +00005058 RegisterSet caller_saves = RegisterSet::Empty();
5059 InvokeRuntimeCallingConvention calling_convention;
5060 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
5061 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005062 RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
5063 DataType::Type::kReference).GetCode());
Vladimir Markoea4c1262017-02-06 19:59:33 +00005064 locations->SetCustomSlowPathCallerSaves(caller_saves);
5065 } else {
5066 // For non-Baker read barrier we have a temp-clobbering call.
5067 }
5068 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005069}
5070
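// Code generation for HLoadClass, one case per load kind: the referrer's class
// is read from the current ArtMethod; boot image classes use a link-time
// ADRP+ADD or a literal holding the absolute address; AOT-compiled app code
// reaches boot image classes through an ADRP+LDR from .data.bimg.rel.ro; .bss
// entries load a GC root and take a slow path while the entry is still null
// (or the class still needs initialization); JIT-compiled code loads the class
// from a JIT table literal.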
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005071// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5072// move.
5073void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00005074 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005075 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00005076 codegen_->GenerateLoadClassRuntimeCall(cls);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005077 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle580b6092015-10-06 17:35:58 +01005078 return;
5079 }
Vladimir Marko41559982017-01-06 14:04:23 +00005080 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01005081
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005082 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01005083 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00005084
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005085 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
5086 ? kWithoutReadBarrier
5087 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005088 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00005089 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005090 case HLoadClass::LoadKind::kReferrersClass: {
5091 DCHECK(!cls->CanCallRuntime());
5092 DCHECK(!cls->MustGenerateClinitCheck());
5093 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
5094 Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00005095 codegen_->GenerateGcRootFieldLoad(cls,
5096 out_loc,
5097 current_method,
5098 ArtMethod::DeclaringClassOffset().Int32Value(),
5099 /* fixup_label */ nullptr,
5100 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005101 break;
5102 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005103 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005104 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005105 // Add ADRP with its PC-relative type patch.
5106 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08005107 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005108 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005109 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005110 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005111 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005112 codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005113 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005114 break;
5115 }
5116 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005117 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005118 uint32_t address = dchecked_integral_cast<uint32_t>(
5119 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
5120 DCHECK_NE(address, 0u);
5121 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005122 break;
5123 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005124 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005125 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005126 uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
5127 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
5128 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005129 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005130 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005131 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005132 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005133 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005134 break;
5135 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005136 case HLoadClass::LoadKind::kBssEntry: {
5137 // Add ADRP with its PC-relative Class .bss entry patch.
5138 const DexFile& dex_file = cls->GetDexFile();
5139 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markof3c52b42017-11-17 17:32:12 +00005140 vixl::aarch64::Register temp = XRegisterFrom(out_loc);
5141 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
5142 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005143 // Add LDR with its PC-relative Class .bss entry patch.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005144 vixl::aarch64::Label* ldr_label =
Vladimir Markof3c52b42017-11-17 17:32:12 +00005145 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005146 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00005147 codegen_->GenerateGcRootFieldLoad(cls,
5148 out_loc,
5149 temp,
5150 /* offset placeholder */ 0u,
5151 ldr_label,
5152 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005153 generate_null_check = true;
5154 break;
5155 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005156 case HLoadClass::LoadKind::kJitTableAddress: {
5157 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
5158 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00005159 cls->GetClass()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00005160 codegen_->GenerateGcRootFieldLoad(cls,
5161 out_loc,
5162 out.X(),
5163 /* offset */ 0,
5164 /* fixup_label */ nullptr,
5165 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005166 break;
5167 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005168 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005169 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00005170 LOG(FATAL) << "UNREACHABLE";
5171 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005172 }
5173
Vladimir Markoea4c1262017-02-06 19:59:33 +00005174 bool do_clinit = cls->MustGenerateClinitCheck();
5175 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005176 DCHECK(cls->CanCallRuntime());
Vladimir Marko174b2e22017-10-12 13:34:49 +01005177 SlowPathCodeARM64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(
Vladimir Markof3c52b42017-11-17 17:32:12 +00005178 cls, cls, cls->GetDexPc(), do_clinit);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005179 codegen_->AddSlowPath(slow_path);
5180 if (generate_null_check) {
5181 __ Cbz(out, slow_path->GetEntryLabel());
5182 }
5183 if (cls->MustGenerateClinitCheck()) {
5184 GenerateClassInitializationCheck(slow_path, out);
5185 } else {
5186 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00005187 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005188 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005189 }
5190}
5191
Orion Hodsondbaa5c72018-05-10 08:22:46 +01005192void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5193 InvokeRuntimeCallingConvention calling_convention;
5194 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5195 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
5196}
5197
5198void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5199 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
5200}
5201
Orion Hodson18259d72018-04-12 11:18:23 +01005202void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
5203 InvokeRuntimeCallingConvention calling_convention;
5204 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5205 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
5206}
5207
5208void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
5209 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
5210}
5211
David Brazdilcb1c0552015-08-04 16:22:25 +01005212static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005213 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01005214}
5215
Alexandre Rames67555f72014-11-18 10:55:16 +00005216void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
5217 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005218 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexandre Rames67555f72014-11-18 10:55:16 +00005219 locations->SetOut(Location::RequiresRegister());
5220}
5221
5222void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005223 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
5224}
5225
5226void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005227 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01005228}
5229
5230void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5231 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00005232}
5233
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005234HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
5235 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005236 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005237 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005238 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00005239 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005240 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005241 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005242 case HLoadString::LoadKind::kJitTableAddress:
5243 DCHECK(Runtime::Current()->UseJitCompilation());
5244 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005245 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005246 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005247 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005248 }
5249 return desired_string_load_kind;
5250}
5251
Alexandre Rames67555f72014-11-18 10:55:16 +00005252void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005253 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01005254 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005255 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005256 InvokeRuntimeCallingConvention calling_convention;
5257 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
5258 } else {
5259 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005260 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5261 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00005262 // Rely on the pResolveString and marking to save everything we need.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005263 RegisterSet caller_saves = RegisterSet::Empty();
5264 InvokeRuntimeCallingConvention calling_convention;
5265 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
5266 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005267 RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
5268 DataType::Type::kReference).GetCode());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005269 locations->SetCustomSlowPathCallerSaves(caller_saves);
5270 } else {
5271 // For non-Baker read barrier we have a temp-clobbering call.
5272 }
5273 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005274 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005275}
5276
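// Code generation for HLoadString mirrors the HLoadClass cases above: boot
// image strings use a link-time ADRP+ADD or an absolute-address literal,
// .data.bimg.rel.ro is used from AOT-compiled app code, .bss entries take a
// slow path that calls pResolveString while the entry is still null, and the
// JIT uses a table literal. Any remaining kind falls through to the
// kQuickResolveString runtime call at the end of the function.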
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005277// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5278// move.
5279void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00005280 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005281 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005282
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005283 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005284 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005285 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005286 // Add ADRP with its PC-relative String patch.
5287 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005288 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005289 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005290 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005291 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005292 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005293 codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005294 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005295 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005296 }
5297 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005298 uint32_t address = dchecked_integral_cast<uint32_t>(
5299 reinterpret_cast<uintptr_t>(load->GetString().Get()));
5300 DCHECK_NE(address, 0u);
5301 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005302 return;
5303 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005304 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005305 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005306 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
5307 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
5308 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005309 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005310 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005311 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005312 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005313 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
5314 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005315 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00005316 case HLoadString::LoadKind::kBssEntry: {
5317 // Add ADRP with its PC-relative String .bss entry patch.
5318 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005319 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00005320 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markof3c52b42017-11-17 17:32:12 +00005321 Register temp = XRegisterFrom(out_loc);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005322 vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005323 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005324 // Add LDR with its PC-relative String .bss entry patch.
Vladimir Markoaad75c62016-10-03 08:46:48 +00005325 vixl::aarch64::Label* ldr_label =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005326 codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005327 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00005328 codegen_->GenerateGcRootFieldLoad(load,
5329 out_loc,
5330 temp,
5331 /* offset placeholder */ 0u,
5332 ldr_label,
5333 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005334 SlowPathCodeARM64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00005335 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005336 codegen_->AddSlowPath(slow_path);
5337 __ Cbz(out.X(), slow_path->GetEntryLabel());
5338 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005339 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005340 return;
5341 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005342 case HLoadString::LoadKind::kJitTableAddress: {
5343 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005344 load->GetStringIndex(),
5345 load->GetString()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00005346 codegen_->GenerateGcRootFieldLoad(load,
5347 out_loc,
5348 out.X(),
5349 /* offset */ 0,
5350 /* fixup_label */ nullptr,
5351 kCompilerReadBarrierOption);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005352 return;
5353 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005354 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005355 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005356 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005357
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005358  // TODO: Re-add the compiler code to do string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005359 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005360 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005361 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005362 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
5363 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005364 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005365}
5366
Alexandre Rames5319def2014-10-23 10:03:10 +01005367void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005368 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01005369 locations->SetOut(Location::ConstantLocation(constant));
5370}
5371
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005372void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005373 // Will be generated at use site.
5374}
5375
Alexandre Rames67555f72014-11-18 10:55:16 +00005376void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005377 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5378 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005379 InvokeRuntimeCallingConvention calling_convention;
5380 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5381}
5382
5383void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01005384 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005385 instruction,
5386 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005387 if (instruction->IsEnter()) {
5388 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5389 } else {
5390 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5391 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005392 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005393}
5394
Alexandre Rames42d641b2014-10-27 14:00:51 +00005395void LocationsBuilderARM64::VisitMul(HMul* mul) {
5396 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005397 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005398 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005399 case DataType::Type::kInt32:
5400 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005401 locations->SetInAt(0, Location::RequiresRegister());
5402 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005403 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005404 break;
5405
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005406 case DataType::Type::kFloat32:
5407 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005408 locations->SetInAt(0, Location::RequiresFpuRegister());
5409 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00005410 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005411 break;
5412
5413 default:
5414 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5415 }
5416}
5417
5418void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
5419 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005420 case DataType::Type::kInt32:
5421 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005422 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
5423 break;
5424
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005425 case DataType::Type::kFloat32:
5426 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005427 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00005428 break;
5429
5430 default:
5431 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5432 }
5433}
5434
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005435void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
5436 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005437 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005438 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005439 case DataType::Type::kInt32:
5440 case DataType::Type::kInt64:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00005441 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00005442 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005443 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005444
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005445 case DataType::Type::kFloat32:
5446 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005447 locations->SetInAt(0, Location::RequiresFpuRegister());
5448 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005449 break;
5450
5451 default:
5452 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5453 }
5454}
5455
5456void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
5457 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005458 case DataType::Type::kInt32:
5459 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005460 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
5461 break;
5462
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005463 case DataType::Type::kFloat32:
5464 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005465 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005466 break;
5467
5468 default:
5469 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5470 }
5471}
5472
5473void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005474 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5475 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005476 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005477 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005478 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5479 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005480}
5481
5482void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005483  // Note: if heap poisoning is enabled, the entry point takes care
5484 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00005485 QuickEntrypointEnum entrypoint =
5486 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
5487 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005488 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005489 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005490}
5491
Alexandre Rames5319def2014-10-23 10:03:10 +01005492void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005493 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5494 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01005495 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07005496 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005497 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexandre Rames5319def2014-10-23 10:03:10 +01005498}
5499
5500void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005501 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5502 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005503 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005504}
5505
5506void LocationsBuilderARM64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005507 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005508 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005509 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005510}
5511
5512void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005513 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005514 case DataType::Type::kInt32:
5515 case DataType::Type::kInt64:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005516 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005517 break;
5518
5519 default:
5520 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5521 }
5522}
5523
David Brazdil66d126e2015-04-03 16:02:44 +01005524void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005525 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
David Brazdil66d126e2015-04-03 16:02:44 +01005526 locations->SetInAt(0, Location::RequiresRegister());
5527 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5528}
5529
5530void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005531 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005532}
5533
Alexandre Rames5319def2014-10-23 10:03:10 +01005534void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005535 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5536 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005537}
5538
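// Implicit null checks load from the object into wzr; a null reference faults
// and the runtime's fault handler turns the signal into a NullPointerException,
// using the PC recorded here to map the faulting instruction back to this
// HNullCheck. Explicit null checks (below) instead branch to a slow path on a
// CBZ of the object register.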
Calin Juravle2ae48182016-03-16 14:05:09 +00005539void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5540 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005541 return;
5542 }
Artem Serov914d7a82017-02-07 14:33:49 +00005543 {
5544    // Ensure that between the load and MaybeRecordImplicitNullCheck there are no pools emitted.
5545 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5546 Location obj = instruction->GetLocations()->InAt(0);
5547 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5548 RecordPcInfo(instruction, instruction->GetDexPc());
5549 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005550}
5551
Calin Juravle2ae48182016-03-16 14:05:09 +00005552void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005553 SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005554 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005555
5556 LocationSummary* locations = instruction->GetLocations();
5557 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005558
5559 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005560}
5561
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005562void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005563 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005564}
5565
Alexandre Rames67555f72014-11-18 10:55:16 +00005566void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5567 HandleBinaryOp(instruction);
5568}
5569
5570void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5571 HandleBinaryOp(instruction);
5572}
5573
Alexandre Rames3e69f162014-12-10 10:36:50 +00005574void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5575 LOG(FATAL) << "Unreachable";
5576}
5577
5578void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005579 if (instruction->GetNext()->IsSuspendCheck() &&
5580 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5581 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5582 // The back edge will generate the suspend check.
5583 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5584 }
5585
Alexandre Rames3e69f162014-12-10 10:36:50 +00005586 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5587}
5588
Alexandre Rames5319def2014-10-23 10:03:10 +01005589void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005590 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005591 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5592 if (location.IsStackSlot()) {
5593 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5594 } else if (location.IsDoubleStackSlot()) {
5595 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5596 }
5597 locations->SetOut(location);
5598}
5599
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005600void InstructionCodeGeneratorARM64::VisitParameterValue(
5601 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005602 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005603}
5604
5605void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5606 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005607 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005608 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005609}
5610
5611void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5612 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5613 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005614}
5615
5616void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005617 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005618 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005619 locations->SetInAt(i, Location::Any());
5620 }
5621 locations->SetOut(Location::Any());
5622}
5623
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005624void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005625 LOG(FATAL) << "Unreachable";
5626}
5627
Serban Constantinescu02164b32014-11-13 14:05:07 +00005628void LocationsBuilderARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005629 DataType::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005630 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005631 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005632 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005633 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005634
5635 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005636 case DataType::Type::kInt32:
5637 case DataType::Type::kInt64:
Serban Constantinescu02164b32014-11-13 14:05:07 +00005638 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005639 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005640 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5641 break;
5642
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005643 case DataType::Type::kFloat32:
5644 case DataType::Type::kFloat64: {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005645 InvokeRuntimeCallingConvention calling_convention;
5646 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5647 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5648 locations->SetOut(calling_convention.GetReturnLocation(type));
5649
5650 break;
5651 }
5652
Serban Constantinescu02164b32014-11-13 14:05:07 +00005653 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005654 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005655 }
5656}
5657
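// Remainder by a power-of-two constant without a division: the result is
// dividend & (abs_imm - 1) for a non-negative dividend and
// -((-dividend) & (abs_imm - 1)) for a negative one, selected with CSNEG so
// that the result keeps the sign of the dividend as Java's % requires.
// For example, with imm = 4 and dividend = -7: -((7) & 3) = -3, and
// -7 % 4 == -3.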
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005658void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005659 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005660 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
5661 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
5662
5663 Register out = OutputRegister(instruction);
5664 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005665
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005666 if (abs_imm == 2) {
5667 __ Cmp(dividend, 0);
5668 __ And(out, dividend, 1);
5669 __ Csneg(out, out, out, ge);
5670 } else {
5671 UseScratchRegisterScope temps(GetVIXLAssembler());
5672 Register temp = temps.AcquireSameSizeAs(out);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005673
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005674 __ Negs(temp, dividend);
5675 __ And(out, dividend, abs_imm - 1);
5676 __ And(temp, temp, abs_imm - 1);
5677 __ Csneg(out, out, temp, mi);
5678 }
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005679}
5680
5681void InstructionCodeGeneratorARM64::GenerateIntRemForOneOrMinusOneDenom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005682 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005683 DCHECK(imm == 1 || imm == -1) << imm;
5684
5685 Register out = OutputRegister(instruction);
5686 __ Mov(out, 0);
5687}
5688
5689void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005690 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005691
5692 if (imm == 0) {
5693 // Do not generate anything.
5694    // DivZeroCheck would prevent any code from being executed.
5695 return;
5696 }
5697
5698 if (imm == 1 || imm == -1) {
5699    // TODO: These cases need to be optimized in InstructionSimplifier.
5700 GenerateIntRemForOneOrMinusOneDenom(instruction);
5701 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
5702 GenerateIntRemForPower2Denom(instruction);
5703 } else {
5704 DCHECK(imm < -2 || imm > 2) << imm;
5705 GenerateDivRemWithAnyConstant(instruction);
5706 }
5707}
5708
5709void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
5710 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
5711 << instruction->GetResultType();
5712
5713 if (instruction->GetLocations()->InAt(1).IsConstant()) {
5714 GenerateIntRemForConstDenom(instruction);
5715 } else {
5716 Register out = OutputRegister(instruction);
5717 Register dividend = InputRegisterAt(instruction, 0);
5718 Register divisor = InputRegisterAt(instruction, 1);
5719 UseScratchRegisterScope temps(GetVIXLAssembler());
5720 Register temp = temps.AcquireSameSizeAs(out);
5721 __ Sdiv(temp, dividend, divisor);
5722 __ Msub(out, temp, divisor, dividend);
5723 }
5724}
5725
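// AArch64 has no floating-point remainder instruction, so HRem on float/double
// is lowered to a runtime call to fmodf/fmod (kQuickFmodf / kQuickFmod); this is
// why the FP cases are set up as kCallOnMainOnly in the locations builder above.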
Serban Constantinescu02164b32014-11-13 14:05:07 +00005726void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005727 DataType::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005728
Serban Constantinescu02164b32014-11-13 14:05:07 +00005729 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005730 case DataType::Type::kInt32:
5731 case DataType::Type::kInt64: {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005732 GenerateIntRem(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005733 break;
5734 }
5735
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005736 case DataType::Type::kFloat32:
5737 case DataType::Type::kFloat64: {
5738 QuickEntrypointEnum entrypoint =
5739 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005740 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005741 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00005742 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5743 } else {
5744 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5745 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005746 break;
5747 }
5748
Serban Constantinescu02164b32014-11-13 14:05:07 +00005749 default:
5750 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005751 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005752 }
5753}
5754
Aart Bik1f8d51b2018-02-15 10:42:37 -08005755void LocationsBuilderARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005756 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005757}
5758
Aart Bik1f8d51b2018-02-15 10:42:37 -08005759void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005760 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005761}
5762
5763void LocationsBuilderARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005764 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005765}
5766
5767void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005768 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005769}
5770
Aart Bik3dad3412018-02-28 12:01:46 -08005771void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
5772 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
5773 switch (abs->GetResultType()) {
5774 case DataType::Type::kInt32:
5775 case DataType::Type::kInt64:
5776 locations->SetInAt(0, Location::RequiresRegister());
5777 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5778 break;
5779 case DataType::Type::kFloat32:
5780 case DataType::Type::kFloat64:
5781 locations->SetInAt(0, Location::RequiresFpuRegister());
5782 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5783 break;
5784 default:
5785 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5786 }
5787}
5788
5789void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
5790 switch (abs->GetResultType()) {
5791 case DataType::Type::kInt32:
5792 case DataType::Type::kInt64: {
5793 Register in_reg = InputRegisterAt(abs, 0);
5794 Register out_reg = OutputRegister(abs);
5795 __ Cmp(in_reg, Operand(0));
5796 __ Cneg(out_reg, in_reg, lt);
5797 break;
5798 }
5799 case DataType::Type::kFloat32:
5800 case DataType::Type::kFloat64: {
5801 FPRegister in_reg = InputFPRegisterAt(abs, 0);
5802 FPRegister out_reg = OutputFPRegister(abs);
5803 __ Fabs(out_reg, in_reg);
5804 break;
5805 }
5806 default:
5807 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5808 }
5809}
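// Sketch of the integer abs path above (illustration only): Cneg conditionally negates, so the
// absolute value is computed without a branch. In two's complement the most negative value maps
// to itself, matching Java Math.abs semantics.
//
//   int64_t AbsSketch(int64_t in) {  // hypothetical helper
//     return in < 0 ? -in : in;      // Cmp(in, 0); Cneg(out, in, lt)
//   }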
5810
Igor Murashkind01745e2017-04-05 16:40:31 -07005811void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
5812 constructor_fence->SetLocations(nullptr);
5813}
5814
5815void InstructionCodeGeneratorARM64::VisitConstructorFence(
5816 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
5817 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
5818}
5819
Calin Juravle27df7582015-04-17 19:12:31 +01005820void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5821 memory_barrier->SetLocations(nullptr);
5822}
5823
5824void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005825 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005826}
5827
Alexandre Rames5319def2014-10-23 10:03:10 +01005828void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005829 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005830 DataType::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005831 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005832}
5833
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005834void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005835 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005836}
5837
5838void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5839 instruction->SetLocations(nullptr);
5840}
5841
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005842void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005843 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005844}
5845
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005846void LocationsBuilderARM64::VisitRor(HRor* ror) {
5847 HandleBinaryOp(ror);
5848}
5849
5850void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5851 HandleBinaryOp(ror);
5852}
5853
Serban Constantinescu02164b32014-11-13 14:05:07 +00005854void LocationsBuilderARM64::VisitShl(HShl* shl) {
5855 HandleShift(shl);
5856}
5857
5858void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5859 HandleShift(shl);
5860}
5861
5862void LocationsBuilderARM64::VisitShr(HShr* shr) {
5863 HandleShift(shr);
5864}
5865
5866void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5867 HandleShift(shr);
5868}
5869
Alexandre Rames5319def2014-10-23 10:03:10 +01005870void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005871 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005872}
5873
5874void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005875 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005876}
5877
Alexandre Rames67555f72014-11-18 10:55:16 +00005878void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005879 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005880}
5881
5882void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005883 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005884}
5885
5886void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005887 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005888}
5889
Alexandre Rames67555f72014-11-18 10:55:16 +00005890void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005891 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005892}
5893
Calin Juravlee460d1d2015-09-29 04:52:17 +01005894void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5895 HUnresolvedInstanceFieldGet* instruction) {
5896 FieldAccessCallingConventionARM64 calling_convention;
5897 codegen_->CreateUnresolvedFieldLocationSummary(
5898 instruction, instruction->GetFieldType(), calling_convention);
5899}
5900
5901void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5902 HUnresolvedInstanceFieldGet* instruction) {
5903 FieldAccessCallingConventionARM64 calling_convention;
5904 codegen_->GenerateUnresolvedFieldAccess(instruction,
5905 instruction->GetFieldType(),
5906 instruction->GetFieldIndex(),
5907 instruction->GetDexPc(),
5908 calling_convention);
5909}
5910
5911void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5912 HUnresolvedInstanceFieldSet* instruction) {
5913 FieldAccessCallingConventionARM64 calling_convention;
5914 codegen_->CreateUnresolvedFieldLocationSummary(
5915 instruction, instruction->GetFieldType(), calling_convention);
5916}
5917
5918void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5919 HUnresolvedInstanceFieldSet* instruction) {
5920 FieldAccessCallingConventionARM64 calling_convention;
5921 codegen_->GenerateUnresolvedFieldAccess(instruction,
5922 instruction->GetFieldType(),
5923 instruction->GetFieldIndex(),
5924 instruction->GetDexPc(),
5925 calling_convention);
5926}
5927
5928void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5929 HUnresolvedStaticFieldGet* instruction) {
5930 FieldAccessCallingConventionARM64 calling_convention;
5931 codegen_->CreateUnresolvedFieldLocationSummary(
5932 instruction, instruction->GetFieldType(), calling_convention);
5933}
5934
5935void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5936 HUnresolvedStaticFieldGet* instruction) {
5937 FieldAccessCallingConventionARM64 calling_convention;
5938 codegen_->GenerateUnresolvedFieldAccess(instruction,
5939 instruction->GetFieldType(),
5940 instruction->GetFieldIndex(),
5941 instruction->GetDexPc(),
5942 calling_convention);
5943}
5944
5945void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5946 HUnresolvedStaticFieldSet* instruction) {
5947 FieldAccessCallingConventionARM64 calling_convention;
5948 codegen_->CreateUnresolvedFieldLocationSummary(
5949 instruction, instruction->GetFieldType(), calling_convention);
5950}
5951
5952void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5953 HUnresolvedStaticFieldSet* instruction) {
5954 FieldAccessCallingConventionARM64 calling_convention;
5955 codegen_->GenerateUnresolvedFieldAccess(instruction,
5956 instruction->GetFieldType(),
5957 instruction->GetFieldIndex(),
5958 instruction->GetDexPc(),
5959 calling_convention);
5960}
5961
Alexandre Rames5319def2014-10-23 10:03:10 +01005962void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005963 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5964 instruction, LocationSummary::kCallOnSlowPath);
Artem Serov7957d952017-04-04 15:44:09 +01005965  // In the suspend check slow path, there are usually no caller-save registers at all.
5966 // If SIMD instructions are present, however, we force spilling all live SIMD
5967  // registers in full width (since the runtime only saves/restores the lower part).
5968 locations->SetCustomSlowPathCallerSaves(
5969 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexandre Rames5319def2014-10-23 10:03:10 +01005970}
5971
5972void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005973 HBasicBlock* block = instruction->GetBlock();
5974 if (block->GetLoopInformation() != nullptr) {
5975 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5976 // The back edge will generate the suspend check.
5977 return;
5978 }
5979 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5980 // The goto will generate the suspend check.
5981 return;
5982 }
5983 GenerateSuspendCheck(instruction, nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005984 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005985}
5986
Alexandre Rames67555f72014-11-18 10:55:16 +00005987void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005988 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5989 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005990 InvokeRuntimeCallingConvention calling_convention;
5991 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5992}
5993
5994void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005995 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005996 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005997}
5998
5999void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
6000 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006001 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006002 DataType::Type input_type = conversion->GetInputType();
6003 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006004 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
6005 << input_type << " -> " << result_type;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006006 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
6007 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00006008 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6009 }
6010
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006011 if (DataType::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00006012 locations->SetInAt(0, Location::RequiresFpuRegister());
6013 } else {
6014 locations->SetInAt(0, Location::RequiresRegister());
6015 }
6016
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006017 if (DataType::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00006018 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6019 } else {
6020 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6021 }
6022}
6023
6024void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006025 DataType::Type result_type = conversion->GetResultType();
6026 DataType::Type input_type = conversion->GetInputType();
Alexandre Rames67555f72014-11-18 10:55:16 +00006027
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006028 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
6029 << input_type << " -> " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00006030
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006031 if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
6032 int result_size = DataType::Size(result_type);
6033 int input_size = DataType::Size(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00006034 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00006035 Register output = OutputRegister(conversion);
6036 Register source = InputRegisterAt(conversion, 0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006037 if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01006038 // 'int' values are used directly as W registers, discarding the top
6039 // bits, so we don't need to sign-extend and can just perform a move.
6040 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
6041 // top 32 bits of the target register. We theoretically could leave those
6042 // bits unchanged, but we would have to make sure that no code uses a
6043      // 32-bit input value as a 64-bit value assuming that the top 32 bits are
6044 // zero.
6045 __ Mov(output.W(), source.W());
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006046 } else if (DataType::IsUnsignedType(result_type) ||
6047 (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
6048 __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00006049 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00006050 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00006051 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006052 } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00006053 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006054 } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
6055 CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
Serban Constantinescu02164b32014-11-13 14:05:07 +00006056 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006057 } else if (DataType::IsFloatingPointType(result_type) &&
6058 DataType::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00006059 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
6060 } else {
6061 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
6062 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00006063 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00006064}
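// Illustrative sketch (not ART code) of the integral-to-integral cases above: apart from the
// int64 -> int32 register move, the conversions reduce to a bitfield extract that is sign- or
// zero-extended, which is what Sbfx/Ubfx implement.
//
//   // e.g. int64 -> int16 (signed result): Sbfx(out, in, 0, 16)
//   int64_t ToInt16Sketch(int64_t in) { return static_cast<int16_t>(in); }
//   // e.g. int32 -> uint16 (unsigned result): Ubfx(out, in, 0, 16)
//   int64_t ToUint16Sketch(int32_t in) { return static_cast<uint16_t>(in); }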
Alexandre Rames67555f72014-11-18 10:55:16 +00006065
Serban Constantinescu02164b32014-11-13 14:05:07 +00006066void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
6067 HandleShift(ushr);
6068}
6069
6070void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
6071 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00006072}
6073
6074void LocationsBuilderARM64::VisitXor(HXor* instruction) {
6075 HandleBinaryOp(instruction);
6076}
6077
6078void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
6079 HandleBinaryOp(instruction);
6080}
6081
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006082void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006083  // Nothing to do, this should be removed during the prepare-for-register-allocation pass.
Calin Juravleb1498f62015-02-16 13:13:29 +00006084 LOG(FATAL) << "Unreachable";
6085}
6086
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01006087void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00006088  // Nothing to do, this should be removed during the prepare-for-register-allocation pass.
Calin Juravleb1498f62015-02-16 13:13:29 +00006089 LOG(FATAL) << "Unreachable";
6090}
6091
Mark Mendellfe57faa2015-09-18 09:26:15 -04006092// Simple implementation of packed switch - generate cascaded compare/jumps.
6093void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6094 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006095 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006096 locations->SetInAt(0, Location::RequiresRegister());
6097}
6098
6099void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
6100 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08006101 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04006102 Register value_reg = InputRegisterAt(switch_instr, 0);
6103 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
6104
Zheng Xu3927c8b2015-11-18 17:46:25 +08006105  // Roughly assume a maximum average of 16 assembly instructions generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01006106 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08006107  // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the graph to
6108 // make sure we don't emit it if the target may run out of range.
6109 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
6110 // ranges and emit the tables only as required.
6111 static constexpr int32_t kJumpTableInstructionThreshold = 1* MB / kMaxExpectedSizePerHInstruction;
Mark Mendellfe57faa2015-09-18 09:26:15 -04006112
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006113 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08006114 // Current instruction id is an upper bound of the number of HIRs in the graph.
6115 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
6116 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006117 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
6118 Register temp = temps.AcquireW();
6119 __ Subs(temp, value_reg, Operand(lower_bound));
6120
Zheng Xu3927c8b2015-11-18 17:46:25 +08006121 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00006122 // Jump to successors[0] if value == lower_bound.
6123 __ B(eq, codegen_->GetLabelOf(successors[0]));
6124 int32_t last_index = 0;
6125 for (; num_entries - last_index > 2; last_index += 2) {
6126 __ Subs(temp, temp, Operand(2));
6127 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
6128 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
6129 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
6130 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
6131 }
6132 if (num_entries - last_index == 2) {
6133 // The last missing case_value.
6134 __ Cmp(temp, Operand(1));
6135 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08006136 }
6137
6138 // And the default for any other value.
6139 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
6140 __ B(codegen_->GetLabelOf(default_block));
6141 }
6142 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01006143 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08006144
6145 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
6146
6147    // The instructions below should use at most one blocked register. Since there are two blocked
6148 // registers, we are free to block one.
6149 Register temp_w = temps.AcquireW();
6150 Register index;
6151 // Remove the bias.
6152 if (lower_bound != 0) {
6153 index = temp_w;
6154 __ Sub(index, value_reg, Operand(lower_bound));
6155 } else {
6156 index = value_reg;
6157 }
6158
6159    // Jump to the default block if the index is out of range.
6160 __ Cmp(index, Operand(num_entries));
6161 __ B(hs, codegen_->GetLabelOf(default_block));
6162
6163    // In the current VIXL implementation, encoding the immediate value for Adr does not require
6164    // any blocked registers, so we are free to use both VIXL blocked registers to reduce
6165    // register pressure.
6166 Register table_base = temps.AcquireX();
6167 // Load jump offset from the table.
6168 __ Adr(table_base, jump_table->GetTableStartLabel());
6169 Register jump_offset = temp_w;
6170 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
6171
6172    // Jump to the target block by branching to table_base (PC-relative) + offset.
6173 Register target_address = table_base;
6174 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
6175 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04006176 }
6177}
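// Sketch of the jump-table dispatch above (illustration only): each table entry is a 32-bit
// signed offset relative to the table base, so the branch target is
//
//   target = table_base + sign_extend_32_to_64(table[index]);
//
// which is what the Ldr(..., UXTW, 2) / Add(..., SXTW) / Br sequence computes.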
6178
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006179void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
6180 HInstruction* instruction,
6181 Location out,
6182 uint32_t offset,
6183 Location maybe_temp,
6184 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006185 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006186 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006187 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006188 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006189 if (kUseBakerReadBarrier) {
6190 // Load with fast path based Baker's read barrier.
6191 // /* HeapReference<Object> */ out = *(out + offset)
6192 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6193 out,
6194 out_reg,
6195 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006196 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006197 /* needs_null_check */ false,
6198 /* use_load_acquire */ false);
6199 } else {
6200 // Load with slow path based read barrier.
6201 // Save the value of `out` into `maybe_temp` before overwriting it
6202 // in the following move operation, as we will need it for the
6203 // read barrier below.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006204 Register temp_reg = RegisterFrom(maybe_temp, type);
Roland Levillain44015862016-01-22 11:47:17 +00006205 __ Mov(temp_reg, out_reg);
6206 // /* HeapReference<Object> */ out = *(out + offset)
6207 __ Ldr(out_reg, HeapOperand(out_reg, offset));
6208 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6209 }
6210 } else {
6211 // Plain load with no read barrier.
6212 // /* HeapReference<Object> */ out = *(out + offset)
6213 __ Ldr(out_reg, HeapOperand(out_reg, offset));
6214 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
6215 }
6216}
6217
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006218void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
6219 HInstruction* instruction,
6220 Location out,
6221 Location obj,
6222 uint32_t offset,
6223 Location maybe_temp,
6224 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006225 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006226 Register out_reg = RegisterFrom(out, type);
6227 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006228 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006229 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006230 if (kUseBakerReadBarrier) {
6231 // Load with fast path based Baker's read barrier.
Roland Levillain44015862016-01-22 11:47:17 +00006232 // /* HeapReference<Object> */ out = *(obj + offset)
6233 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6234 out,
6235 obj_reg,
6236 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006237 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006238 /* needs_null_check */ false,
6239 /* use_load_acquire */ false);
6240 } else {
6241 // Load with slow path based read barrier.
6242 // /* HeapReference<Object> */ out = *(obj + offset)
6243 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
6244 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6245 }
6246 } else {
6247 // Plain load with no read barrier.
6248 // /* HeapReference<Object> */ out = *(obj + offset)
6249 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
6250 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
6251 }
6252}
6253
Vladimir Markoca1e0382018-04-11 09:58:41 +00006254void CodeGeneratorARM64::GenerateGcRootFieldLoad(
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006255 HInstruction* instruction,
6256 Location root,
6257 Register obj,
6258 uint32_t offset,
6259 vixl::aarch64::Label* fixup_label,
6260 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00006261 DCHECK(fixup_label == nullptr || offset == 0u);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006262 Register root_reg = RegisterFrom(root, DataType::Type::kReference);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006263 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006264 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00006265 if (kUseBakerReadBarrier) {
6266 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00006267      // Baker's read barriers are used.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006268 if (kBakerReadBarrierLinkTimeThunksEnableForGcRoots &&
6269 !Runtime::Current()->UseJitCompilation()) {
Roland Levillain97c46462017-05-11 14:04:03 +01006270 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
6271 // the Marking Register) to decide whether we need to enter
6272 // the slow path to mark the GC root.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006273 //
6274 // We use link-time generated thunks for the slow path. That thunk
6275 // checks the reference and jumps to the entrypoint if needed.
6276 //
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006277 // lr = &return_address;
6278 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
Roland Levillain97c46462017-05-11 14:04:03 +01006279 // if (mr) { // Thread::Current()->GetIsGcMarking()
6280 // goto gc_root_thunk<root_reg>(lr)
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006281 // }
6282 // return_address:
Roland Levillain44015862016-01-22 11:47:17 +00006283
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006284 UseScratchRegisterScope temps(GetVIXLAssembler());
6285 DCHECK(temps.IsAvailable(ip0));
6286 DCHECK(temps.IsAvailable(ip1));
6287 temps.Exclude(ip0, ip1);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006288 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());
6289 vixl::aarch64::Label* cbnz_label = NewBakerReadBarrierPatch(custom_data);
Roland Levillainba650a42017-03-06 13:52:32 +00006290
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006291 EmissionCheckScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
6292 vixl::aarch64::Label return_address;
6293 __ adr(lr, &return_address);
6294 if (fixup_label != nullptr) {
6295 __ Bind(fixup_label);
6296 }
6297 static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
6298 "GC root LDR must be 2 instruction (8B) before the return address label.");
6299 __ ldr(root_reg, MemOperand(obj.X(), offset));
6300 __ Bind(cbnz_label);
Roland Levillain97c46462017-05-11 14:04:03 +01006301 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006302 __ Bind(&return_address);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006303 } else {
Roland Levillain97c46462017-05-11 14:04:03 +01006304 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
6305 // the Marking Register) to decide whether we need to enter
6306 // the slow path to mark the GC root.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006307 //
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006308 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
Roland Levillain97c46462017-05-11 14:04:03 +01006309 // if (mr) { // Thread::Current()->GetIsGcMarking()
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006310 // // Slow path.
Roland Levillain97c46462017-05-11 14:04:03 +01006311 // entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6312 // root = entrypoint(root); // root = ReadBarrier::Mark(root); // Entry point call.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006313 // }
Roland Levillain44015862016-01-22 11:47:17 +00006314
Roland Levillain97c46462017-05-11 14:04:03 +01006315 // Slow path marking the GC root `root`. The entrypoint will
6316 // be loaded by the slow path code.
6317 SlowPathCodeARM64* slow_path =
Vladimir Markoca1e0382018-04-11 09:58:41 +00006318 new (GetScopedAllocator()) ReadBarrierMarkSlowPathARM64(instruction, root);
6319 AddSlowPath(slow_path);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006320
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006321 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6322 if (fixup_label == nullptr) {
6323 __ Ldr(root_reg, MemOperand(obj, offset));
6324 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006325 EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006326 }
6327 static_assert(
6328 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6329 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6330 "have different sizes.");
6331 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6332 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6333 "have different sizes.");
6334
Roland Levillain97c46462017-05-11 14:04:03 +01006335 __ Cbnz(mr, slow_path->GetEntryLabel());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006336 __ Bind(slow_path->GetExitLabel());
6337 }
Roland Levillain44015862016-01-22 11:47:17 +00006338 } else {
6339 // GC root loaded through a slow path for read barriers other
6340 // than Baker's.
6341 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006342 if (fixup_label == nullptr) {
6343 __ Add(root_reg.X(), obj.X(), offset);
6344 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006345 EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006346 }
Roland Levillain44015862016-01-22 11:47:17 +00006347 // /* mirror::Object* */ root = root->Read()
Vladimir Markoca1e0382018-04-11 09:58:41 +00006348 GenerateReadBarrierForRootSlow(instruction, root, root);
Roland Levillain44015862016-01-22 11:47:17 +00006349 }
6350 } else {
6351 // Plain GC root load with no read barrier.
6352 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006353 if (fixup_label == nullptr) {
6354 __ Ldr(root_reg, MemOperand(obj, offset));
6355 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006356 EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006357 }
Roland Levillain44015862016-01-22 11:47:17 +00006358 // Note that GC roots are not affected by heap poisoning, thus we
6359 // do not have to unpoison `root_reg` here.
6360 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006361 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillain44015862016-01-22 11:47:17 +00006362}
6363
6364void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6365 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006366 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006367 uint32_t offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006368 Location maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00006369 bool needs_null_check,
6370 bool use_load_acquire) {
6371 DCHECK(kEmitCompilerReadBarrier);
6372 DCHECK(kUseBakerReadBarrier);
6373
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006374 if (kBakerReadBarrierLinkTimeThunksEnableForFields &&
6375 !use_load_acquire &&
6376 !Runtime::Current()->UseJitCompilation()) {
Roland Levillain97c46462017-05-11 14:04:03 +01006377 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6378 // Marking Register) to decide whether we need to enter the slow
6379 // path to mark the reference. Then, in the slow path, check the
6380 // gray bit in the lock word of the reference's holder (`obj`) to
6381 // decide whether to mark `ref` or not.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006382 //
6383 // We use link-time generated thunks for the slow path. That thunk checks
6384 // the holder and jumps to the entrypoint if needed. If the holder is not
6385 // gray, it creates a fake dependency and returns to the LDR instruction.
6386 //
Vladimir Marko66d691d2017-04-07 17:53:39 +01006387 // lr = &gray_return_address;
Roland Levillain97c46462017-05-11 14:04:03 +01006388 // if (mr) { // Thread::Current()->GetIsGcMarking()
6389 // goto field_thunk<holder_reg, base_reg>(lr)
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006390 // }
6391 // not_gray_return_address:
6392 // // Original reference load. If the offset is too large to fit
6393 // // into LDR, we use an adjusted base register here.
Vladimir Marko88abba22017-05-03 17:09:25 +01006394 // HeapReference<mirror::Object> reference = *(obj+offset);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006395 // gray_return_address:
6396
6397 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
6398 Register base = obj;
6399 if (offset >= kReferenceLoadMinFarOffset) {
6400 DCHECK(maybe_temp.IsRegister());
6401 base = WRegisterFrom(maybe_temp);
6402 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
6403 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
6404 offset &= (kReferenceLoadMinFarOffset - 1u);
6405 }
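    // Worked example (illustration only, assuming kReferenceLoadMinFarOffset is a power of two
    // such as 16 KB): offset == 0x4A10 is split into base = obj + 0x4000 and offset = 0xA10, so
    // that the remaining offset always fits the LDR form expected by the link-time thunk.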
6406 UseScratchRegisterScope temps(GetVIXLAssembler());
6407 DCHECK(temps.IsAvailable(ip0));
6408 DCHECK(temps.IsAvailable(ip1));
6409 temps.Exclude(ip0, ip1);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006410 uint32_t custom_data = EncodeBakerReadBarrierFieldData(base.GetCode(), obj.GetCode());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006411 vixl::aarch64::Label* cbnz_label = NewBakerReadBarrierPatch(custom_data);
6412
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006413 {
6414 EmissionCheckScope guard(GetVIXLAssembler(),
6415 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6416 vixl::aarch64::Label return_address;
6417 __ adr(lr, &return_address);
6418 __ Bind(cbnz_label);
6419 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time.
6420 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6421 "Field LDR must be 1 instruction (4B) before the return address label; "
6422 " 2 instructions (8B) for heap poisoning.");
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006423 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006424 __ ldr(ref_reg, MemOperand(base.X(), offset));
6425 if (needs_null_check) {
6426 MaybeRecordImplicitNullCheck(instruction);
6427 }
6428 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
6429 __ Bind(&return_address);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006430 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006431 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006432 return;
6433 }
6434
Roland Levillain44015862016-01-22 11:47:17 +00006435 // /* HeapReference<Object> */ ref = *(obj + offset)
Vladimir Markof4f2daa2017-03-20 18:26:59 +00006436 Register temp = WRegisterFrom(maybe_temp);
Roland Levillain44015862016-01-22 11:47:17 +00006437 Location no_index = Location::NoLocation();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006438 size_t no_scale_factor = 0u;
Roland Levillainbfea3352016-06-23 13:48:47 +01006439 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6440 ref,
6441 obj,
6442 offset,
6443 no_index,
6444 no_scale_factor,
6445 temp,
6446 needs_null_check,
6447 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006448}
6449
6450void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6451 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006452 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006453 uint32_t data_offset,
6454 Location index,
6455 Register temp,
6456 bool needs_null_check) {
6457 DCHECK(kEmitCompilerReadBarrier);
6458 DCHECK(kUseBakerReadBarrier);
6459
Vladimir Marko66d691d2017-04-07 17:53:39 +01006460 static_assert(
6461 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6462 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006463 size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006464
6465 if (kBakerReadBarrierLinkTimeThunksEnableForArrays &&
6466 !Runtime::Current()->UseJitCompilation()) {
Roland Levillain97c46462017-05-11 14:04:03 +01006467 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6468 // Marking Register) to decide whether we need to enter the slow
6469 // path to mark the reference. Then, in the slow path, check the
6470 // gray bit in the lock word of the reference's holder (`obj`) to
6471 // decide whether to mark `ref` or not.
Vladimir Marko66d691d2017-04-07 17:53:39 +01006472 //
6473 // We use link-time generated thunks for the slow path. That thunk checks
6474 // the holder and jumps to the entrypoint if needed. If the holder is not
6475 // gray, it creates a fake dependency and returns to the LDR instruction.
6476 //
Vladimir Marko66d691d2017-04-07 17:53:39 +01006477 // lr = &gray_return_address;
Roland Levillain97c46462017-05-11 14:04:03 +01006478 // if (mr) { // Thread::Current()->GetIsGcMarking()
6479 // goto array_thunk<base_reg>(lr)
Vladimir Marko66d691d2017-04-07 17:53:39 +01006480 // }
6481 // not_gray_return_address:
6482 // // Original reference load. If the offset is too large to fit
6483 // // into LDR, we use an adjusted base register here.
Vladimir Marko88abba22017-05-03 17:09:25 +01006484 // HeapReference<mirror::Object> reference = data[index];
Vladimir Marko66d691d2017-04-07 17:53:39 +01006485 // gray_return_address:
6486
6487 DCHECK(index.IsValid());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006488 Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
6489 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006490
6491 UseScratchRegisterScope temps(GetVIXLAssembler());
6492 DCHECK(temps.IsAvailable(ip0));
6493 DCHECK(temps.IsAvailable(ip1));
6494 temps.Exclude(ip0, ip1);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006495 uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());
Vladimir Marko66d691d2017-04-07 17:53:39 +01006496 vixl::aarch64::Label* cbnz_label = NewBakerReadBarrierPatch(custom_data);
6497
Vladimir Marko66d691d2017-04-07 17:53:39 +01006498 __ Add(temp.X(), obj.X(), Operand(data_offset));
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006499 {
6500 EmissionCheckScope guard(GetVIXLAssembler(),
6501 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6502 vixl::aarch64::Label return_address;
6503 __ adr(lr, &return_address);
6504 __ Bind(cbnz_label);
6505 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time.
6506 static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6507 "Array LDR must be 1 instruction (4B) before the return address label; "
6508 " 2 instructions (8B) for heap poisoning.");
6509 __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
6510 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
6511 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
6512 __ Bind(&return_address);
6513 }
6514 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
Vladimir Marko66d691d2017-04-07 17:53:39 +01006515 return;
6516 }
6517
Roland Levillain44015862016-01-22 11:47:17 +00006518 // Array cells are never volatile variables, therefore array loads
6519 // never use Load-Acquire instructions on ARM64.
6520 const bool use_load_acquire = false;
6521
6522 // /* HeapReference<Object> */ ref =
6523 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01006524 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6525 ref,
6526 obj,
6527 data_offset,
6528 index,
6529 scale_factor,
6530 temp,
6531 needs_null_check,
6532 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006533}
6534
6535void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6536 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01006537 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00006538 uint32_t offset,
6539 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01006540 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00006541 Register temp,
6542 bool needs_null_check,
Roland Levillainff487002017-03-07 16:50:01 +00006543 bool use_load_acquire) {
Roland Levillain44015862016-01-22 11:47:17 +00006544 DCHECK(kEmitCompilerReadBarrier);
6545 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01006546 // If we are emitting an array load, we should not be using a
6547 // Load Acquire instruction. In other words:
6548 // `instruction->IsArrayGet()` => `!use_load_acquire`.
6549 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00006550
Roland Levillain97c46462017-05-11 14:04:03 +01006551 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6552 // Marking Register) to decide whether we need to enter the slow
6553 // path to mark the reference. Then, in the slow path, check the
6554 // gray bit in the lock word of the reference's holder (`obj`) to
6555 // decide whether to mark `ref` or not.
Roland Levillain44015862016-01-22 11:47:17 +00006556 //
Roland Levillain97c46462017-05-11 14:04:03 +01006557 // if (mr) { // Thread::Current()->GetIsGcMarking()
Roland Levillainba650a42017-03-06 13:52:32 +00006558 // // Slow path.
Roland Levillain54f869e2017-03-06 13:54:11 +00006559 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6560 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6561 // HeapReference<mirror::Object> ref = *src; // Original reference load.
6562 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6563 // if (is_gray) {
Roland Levillain97c46462017-05-11 14:04:03 +01006564 // entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6565 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
Roland Levillain54f869e2017-03-06 13:54:11 +00006566 // }
6567 // } else {
6568 // HeapReference<mirror::Object> ref = *src; // Original reference load.
Roland Levillain44015862016-01-22 11:47:17 +00006569 // }
Roland Levillain44015862016-01-22 11:47:17 +00006570
Roland Levillainba650a42017-03-06 13:52:32 +00006571 // Slow path marking the object `ref` when the GC is marking. The
Roland Levillain97c46462017-05-11 14:04:03 +01006572 // entrypoint will be loaded by the slow path code.
Roland Levillainff487002017-03-07 16:50:01 +00006573 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006574 new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierSlowPathARM64(
Roland Levillainff487002017-03-07 16:50:01 +00006575 instruction,
6576 ref,
6577 obj,
6578 offset,
6579 index,
6580 scale_factor,
6581 needs_null_check,
6582 use_load_acquire,
Roland Levillain97c46462017-05-11 14:04:03 +01006583 temp);
Roland Levillainba650a42017-03-06 13:52:32 +00006584 AddSlowPath(slow_path);
6585
Roland Levillain97c46462017-05-11 14:04:03 +01006586 __ Cbnz(mr, slow_path->GetEntryLabel());
Roland Levillainff487002017-03-07 16:50:01 +00006587 // Fast path: the GC is not marking: just load the reference.
Roland Levillain54f869e2017-03-06 13:54:11 +00006588 GenerateRawReferenceLoad(
6589 instruction, ref, obj, offset, index, scale_factor, needs_null_check, use_load_acquire);
Roland Levillainba650a42017-03-06 13:52:32 +00006590 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006591 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillainba650a42017-03-06 13:52:32 +00006592}
6593
Roland Levillainff487002017-03-07 16:50:01 +00006594void CodeGeneratorARM64::UpdateReferenceFieldWithBakerReadBarrier(HInstruction* instruction,
6595 Location ref,
6596 Register obj,
6597 Location field_offset,
6598 Register temp,
6599 bool needs_null_check,
6600 bool use_load_acquire) {
6601 DCHECK(kEmitCompilerReadBarrier);
6602 DCHECK(kUseBakerReadBarrier);
6603 // If we are emitting an array load, we should not be using a
6604 // Load Acquire instruction. In other words:
6605 // `instruction->IsArrayGet()` => `!use_load_acquire`.
6606 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
6607
Roland Levillain97c46462017-05-11 14:04:03 +01006608 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
6609 // Marking Register) to decide whether we need to enter the slow
6610 // path to update the reference field within `obj`. Then, in the
6611 // slow path, check the gray bit in the lock word of the reference's
6612 // holder (`obj`) to decide whether to mark `ref` and update the
6613 // field or not.
Roland Levillainff487002017-03-07 16:50:01 +00006614 //
Roland Levillain97c46462017-05-11 14:04:03 +01006615 // if (mr) { // Thread::Current()->GetIsGcMarking()
Roland Levillainff487002017-03-07 16:50:01 +00006616 // // Slow path.
6617 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6618 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6619 // HeapReference<mirror::Object> ref = *(obj + field_offset); // Reference load.
6620 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6621 // if (is_gray) {
6622 // old_ref = ref;
Roland Levillain97c46462017-05-11 14:04:03 +01006623 // entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6624 // ref = entrypoint(ref); // ref = ReadBarrier::Mark(ref); // Runtime entry point call.
Roland Levillainff487002017-03-07 16:50:01 +00006625 // compareAndSwapObject(obj, field_offset, old_ref, ref);
6626 // }
6627 // }
6628
6629 // Slow path updating the object reference at address `obj + field_offset`
Roland Levillain97c46462017-05-11 14:04:03 +01006630 // when the GC is marking. The entrypoint will be loaded by the slow path code.
Roland Levillainff487002017-03-07 16:50:01 +00006631 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006632 new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
Roland Levillainff487002017-03-07 16:50:01 +00006633 instruction,
6634 ref,
6635 obj,
6636 /* offset */ 0u,
6637 /* index */ field_offset,
6638 /* scale_factor */ 0u /* "times 1" */,
6639 needs_null_check,
6640 use_load_acquire,
Roland Levillain97c46462017-05-11 14:04:03 +01006641 temp);
Roland Levillainff487002017-03-07 16:50:01 +00006642 AddSlowPath(slow_path);
6643
Roland Levillain97c46462017-05-11 14:04:03 +01006644 __ Cbnz(mr, slow_path->GetEntryLabel());
Roland Levillainff487002017-03-07 16:50:01 +00006645 // Fast path: the GC is not marking: nothing to do (the field is
6646 // up-to-date, and we don't need to load the reference).
6647 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006648 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillainff487002017-03-07 16:50:01 +00006649}
6650
Roland Levillainba650a42017-03-06 13:52:32 +00006651void CodeGeneratorARM64::GenerateRawReferenceLoad(HInstruction* instruction,
6652 Location ref,
6653 Register obj,
6654 uint32_t offset,
6655 Location index,
6656 size_t scale_factor,
6657 bool needs_null_check,
6658 bool use_load_acquire) {
6659 DCHECK(obj.IsW());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006660 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00006661 Register ref_reg = RegisterFrom(ref, type);
Roland Levillain44015862016-01-22 11:47:17 +00006662
Roland Levillainba650a42017-03-06 13:52:32 +00006663 // If needed, vixl::EmissionCheckScope guards are used to ensure
6664 // that no pools are emitted between the load (macro) instruction
6665 // and MaybeRecordImplicitNullCheck.
Roland Levillain44015862016-01-22 11:47:17 +00006666
Roland Levillain44015862016-01-22 11:47:17 +00006667 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006668 // Load types involving an "index": ArrayGet,
6669 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6670 // intrinsics.
Roland Levillainbfea3352016-06-23 13:48:47 +01006671 if (use_load_acquire) {
6672 // UnsafeGetObjectVolatile intrinsic case.
6673 // Register `index` is not an index in an object array, but an
6674 // offset to an object reference field within object `obj`.
6675 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
6676 DCHECK(instruction->GetLocations()->Intrinsified());
6677 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
6678 << instruction->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01006679 DCHECK_EQ(offset, 0u);
6680 DCHECK_EQ(scale_factor, 0u);
Roland Levillainba650a42017-03-06 13:52:32 +00006681 DCHECK_EQ(needs_null_check, false);
6682 // /* HeapReference<mirror::Object> */ ref = *(obj + index)
Roland Levillainbfea3352016-06-23 13:48:47 +01006683 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
6684 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00006685 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006686 // ArrayGet and UnsafeGetObject and UnsafeCASObject intrinsics cases.
6687 // /* HeapReference<mirror::Object> */ ref = *(obj + offset + (index << scale_factor))
Roland Levillainbfea3352016-06-23 13:48:47 +01006688 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01006689 uint32_t computed_offset = offset + (Int64FromLocation(index) << scale_factor);
Roland Levillainba650a42017-03-06 13:52:32 +00006690 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillainbfea3352016-06-23 13:48:47 +01006691 Load(type, ref_reg, HeapOperand(obj, computed_offset));
Roland Levillainba650a42017-03-06 13:52:32 +00006692 if (needs_null_check) {
6693 MaybeRecordImplicitNullCheck(instruction);
6694 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006695 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006696 UseScratchRegisterScope temps(GetVIXLAssembler());
6697 Register temp = temps.AcquireW();
6698 __ Add(temp, obj, offset);
6699 {
6700 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
6701 Load(type, ref_reg, HeapOperand(temp, XRegisterFrom(index), LSL, scale_factor));
6702 if (needs_null_check) {
6703 MaybeRecordImplicitNullCheck(instruction);
6704 }
6705 }
Roland Levillainbfea3352016-06-23 13:48:47 +01006706 }
Roland Levillain44015862016-01-22 11:47:17 +00006707 }
Roland Levillain44015862016-01-22 11:47:17 +00006708 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006709 // /* HeapReference<mirror::Object> */ ref = *(obj + offset)
Roland Levillain44015862016-01-22 11:47:17 +00006710 MemOperand field = HeapOperand(obj, offset);
6711 if (use_load_acquire) {
Roland Levillainba650a42017-03-06 13:52:32 +00006712 // Implicit null checks are handled by CodeGeneratorARM64::LoadAcquire.
6713 LoadAcquire(instruction, ref_reg, field, needs_null_check);
Roland Levillain44015862016-01-22 11:47:17 +00006714 } else {
Roland Levillainba650a42017-03-06 13:52:32 +00006715 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain44015862016-01-22 11:47:17 +00006716 Load(type, ref_reg, field);
Roland Levillainba650a42017-03-06 13:52:32 +00006717 if (needs_null_check) {
6718 MaybeRecordImplicitNullCheck(instruction);
6719 }
Roland Levillain44015862016-01-22 11:47:17 +00006720 }
6721 }
6722
6723 // Object* ref = ref_addr->AsMirrorPtr()
6724 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Roland Levillain44015862016-01-22 11:47:17 +00006725}
6726
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006727void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
6728 // The following condition is a compile-time one, so it does not have a run-time cost.
6729 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
6730 // The following condition is a run-time one; it is executed after the
6731 // previous compile-time test, to avoid penalizing non-debug builds.
6732 if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
6733 UseScratchRegisterScope temps(GetVIXLAssembler());
6734 Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
      GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
    }
  }
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
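    // Vtable case: the embedded vtable lives inside the Class object itself, so a single load
    // at the per-index offset yields the ArtMethod pointer.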
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
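    // IMT case: first load the ImTable pointer out of the class, then load the ArtMethod* at
    // the IMT index's offset within that table.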
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

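// Patches a 32-bit literal embedded in JIT-compiled code so that it holds the address of the
// root table slot `index_in_table`; the compiled code loads the GC root through that slot.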
static void PatchJitRootUse(uint8_t* code,
                            const uint8_t* roots_data,
                            vixl::aarch64::Literal<uint32_t>* literal,
                            uint64_t index_in_table) {
  uint32_t literal_offset = literal->GetOffset();
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint8_t* data = code + literal_offset;
  reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}

#undef __
#undef QUICK_ENTRY_POINT

#define __ assembler.GetVIXLAssembler()->

static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
                                     vixl::aarch64::Register base_reg,
                                     vixl::aarch64::MemOperand& lock_word,
                                     vixl::aarch64::Label* slow_path,
                                     vixl::aarch64::Label* throw_npe = nullptr) {
  // Load the lock word containing the rb_state.
  __ Ldr(ip0.W(), lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
  static_assert(
      BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
      "Field and array LDR offsets must be the same to reuse the same code.");
  // To throw NPE, we return to the fast path; the artificial dependence below does not matter.
  if (throw_npe != nullptr) {
    __ Bind(throw_npe);
  }
  // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
  static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                "Field LDR must be 1 instruction (4B) before the return address label; "
                " 2 instructions (8B) for heap poisoning.");
  __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
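  // ip0 holds the 32-bit lock word (it was loaded with a W-register LDR), so its upper 32 bits
  // are zero and `ip0 LSR #32` is always zero: the ADD leaves base_reg unchanged while making
  // the subsequent load address-dependent on the lock word.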
  __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
  __ Br(lr);          // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}

// Load the read barrier introspection entrypoint in register `entrypoint`.
static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
                                                       vixl::aarch64::Register entrypoint) {
  // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip0.GetCode(), 16u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
}

void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
                                                      uint32_t encoded_data,
                                                      /*out*/ std::string* debug_name) {
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      auto holder_reg =
          Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      // If base_reg differs from holder_reg, the offset was too large and we must have emitted
      // an explicit null check before the load. Otherwise, for implicit null checks, we need to
      // null-check the holder as we do not necessarily do that check before going to the thunk.
      vixl::aarch64::Label throw_npe_label;
      vixl::aarch64::Label* throw_npe = nullptr;
      if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
        throw_npe = &throw_npe_label;
        __ Cbz(holder_reg.W(), throw_npe);
      }
      // Check if the holder is gray and, if not, add a fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint that performs further checks on the
      // reference and marks it if needed.
      vixl::aarch64::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
      __ Bind(&slow_path);
      MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
      __ Ldr(ip0.W(), ldr_address);         // Load the LDR (immediate) unsigned offset.
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
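      // In the LDR (immediate, unsigned offset) encoding, imm12 sits in bits [21:10] and is
      // scaled by the access size, so the extracted value is shifted back left by 2 when it is
      // used as the byte offset for the 32-bit reference load below.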
      __ Ubfx(ip0.W(), ip0.W(), 10, 12);    // Extract the offset.
      __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2));   // Load the reference.
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Br(ip1);                           // Jump to the entrypoint.
      break;
    }
    case BakerReadBarrierKind::kArray: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffset(), 0);
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
      __ Bind(&slow_path);
      MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
      __ Ldr(ip0.W(), ldr_address);         // Load the LDR (register) unsigned offset.
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      __ Ubfx(ip0, ip0, 16, 6);             // Extract the index register, plus 32 (bit 21 is set).
      __ Bfi(ip1, ip0, 3, 6);               // Insert ip0 into the entrypoint address to create
                                            // a switch case target based on the index register.
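      // With the bitfield insert at bit 3, consecutive register cases land 8 bytes (two A64
      // instructions) apart in the entrypoint's switch table.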
      __ Mov(ip0, base_reg);                // Move the base register to ip0.
      __ Br(ip1);                           // Jump to the entrypoint's array switch case.
      break;
    }
    case BakerReadBarrierKind::kGcRoot: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      auto root_reg =
          Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label return_label, not_marked, forwarding_address;
      __ Cbz(root_reg, &return_label);
      MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip0.W(), lock_word);
      __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
      __ Bind(&return_label);
      __ Br(lr);
      __ Bind(&not_marked);
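      // The lock word state lives in the two most significant bits. ANDing the word with itself
      // shifted left by one sets the sign bit only when both top bits are set, i.e. only for the
      // "forwarding address" state.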
      __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
      __ B(&forwarding_address, mi);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
      // art_quick_read_barrier_mark_introspection_gc_roots.
      __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
      __ Mov(ip0.W(), root_reg);
      __ Br(ip1);
      __ Bind(&forwarding_address);
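      // The forwarding address is stored in the lock word shifted right by
      // LockWord::kForwardingAddressShift; shift it back left to recover the to-space reference.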
      __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
      __ Br(lr);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }

  if (GetCompilerOptions().GenerateAnyDebugInfo()) {
    std::ostringstream oss;
    oss << "BakerReadBarrierThunk";
    switch (kind) {
      case BakerReadBarrierKind::kField:
        oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kArray:
        oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
      case BakerReadBarrierKind::kGcRoot:
        oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
    }
    *debug_name = oss.str();
  }
}

#undef __

}  // namespace arm64
}  // namespace art