/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump sequence
// therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

// A reference load (except object array loads) uses LDR Wt, [Xn, #offset], which can handle
// offset < 16KiB. For offsets >= 16KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB: return lo;
    case kCondBE: return ls;
    case kCondA: return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
            RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
                         DataType::Type::kReference).GetCode());
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Calculate memory accessing operand for save/restore live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

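// Slow path for HBoundsCheck: moves the index and length to the runtime calling convention
// registers and calls the kQuickThrowArrayBounds entrypoint (or kQuickThrowStringBounds for
// String.charAt).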
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

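// Slow path for HDivZeroCheck: calls the kQuickThrowDivZero entrypoint to throw on a zero divisor.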
class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

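// Slow path for HLoadClass and HClinitCheck: resolves the type and/or initializes the class
// through the kQuickResolveType and kQuickInitializeStaticStorage entrypoints.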
class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARM64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
      arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)),
                                  source,
                                  cls_->GetType());
    }
    if (must_do_clinit) {
      arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

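// Slow path for HLoadString: resolves the string through the kQuickResolveString entrypoint and
// moves the result to the desired output location.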
class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

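// Slow path for HNullCheck: calls the kQuickThrowNullPointer entrypoint.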
class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

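// Slow path for HSuspendCheck: calls the kQuickTestSuspend entrypoint and then branches back to
// the code after the check, or to the given successor block.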
class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

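// Slow path for HInstanceOf and HCheckCast: calls the kQuickInstanceofNonTrivial or
// kQuickCheckInstanceOf entrypoint.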
class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

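// Slow path for HDeoptimize: calls the kQuickDeoptimize entrypoint with the deoptimization kind.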
class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

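// Slow path for HArraySet when a runtime call is needed: passes the array, index and value to the
// kQuickAputObject entrypoint.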
class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

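// Emits the jump table for a PackedSwitch: one 32-bit literal per entry, holding the offset from
// the table start to the corresponding successor block's label.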
void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated a jump table of the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
                                           Location out,
                                           Location ref,
                                           Location obj,
                                           uint32_t offset,
                                           Location index)
      : SlowPathCodeARM64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ Ldr(out, HeapOperand(out, class_offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0u);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`DataType::Type::kReference`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

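// Assigns the location for the next argument under the managed (dex) calling convention:
// floating-point arguments use FP registers while available, core arguments use core registers
// while available, and the remaining arguments go on the stack. Stack space is reserved for
// every argument regardless of where it is passed.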
Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
  return next_location;
}

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
                                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000919void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +0800920 EmitJumpTables();
Vladimir Marko966b46f2018-08-03 10:20:19 +0000921
922 // Emit JIT baker read barrier slow paths.
923 DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
924 for (auto& entry : jit_baker_read_barrier_slow_paths_) {
925 uint32_t encoded_data = entry.first;
926 vixl::aarch64::Label* slow_path_entry = &entry.second.label;
927 __ Bind(slow_path_entry);
928 CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name */ nullptr);
929 }
930
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000931 // Ensure we emit the literal pool.
932 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +0000933
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000934 CodeGenerator::Finalize(allocator);
Vladimir Markoca1e0382018-04-11 09:58:41 +0000935
936 // Verify Baker read barrier linker patches.
937 if (kIsDebugBuild) {
938 ArrayRef<const uint8_t> code = allocator->GetMemory();
939 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
940 DCHECK(info.label.IsBound());
941 uint32_t literal_offset = info.label.GetLocation();
942 DCHECK_ALIGNED(literal_offset, 4u);
943
944 auto GetInsn = [&code](uint32_t offset) {
945 DCHECK_ALIGNED(offset, 4u);
946 return
947 (static_cast<uint32_t>(code[offset + 0]) << 0) +
948 (static_cast<uint32_t>(code[offset + 1]) << 8) +
949 (static_cast<uint32_t>(code[offset + 2]) << 16) +
950 (static_cast<uint32_t>(code[offset + 3]) << 24);
951 };
952
953 const uint32_t encoded_data = info.custom_data;
954 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
955 // Check that the next instruction matches the expected LDR.
956 switch (kind) {
Vladimir Marko0ecac682018-08-07 10:40:38 +0100957 case BakerReadBarrierKind::kField:
958 case BakerReadBarrierKind::kAcquire: {
Vladimir Markoca1e0382018-04-11 09:58:41 +0000959 DCHECK_GE(code.size() - literal_offset, 8u);
960 uint32_t next_insn = GetInsn(literal_offset + 4u);
Vladimir Markoca1e0382018-04-11 09:58:41 +0000961 CheckValidReg(next_insn & 0x1fu); // Check destination register.
962 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Marko0ecac682018-08-07 10:40:38 +0100963 if (kind == BakerReadBarrierKind::kField) {
964 // LDR (immediate) with correct base_reg.
965 CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
966 } else {
967 DCHECK(kind == BakerReadBarrierKind::kAcquire);
968 // LDAR with correct base_reg.
969 CHECK_EQ(next_insn & 0xffffffe0u, 0x88dffc00u | (base_reg << 5));
970 }
Vladimir Markoca1e0382018-04-11 09:58:41 +0000971 break;
972 }
973 case BakerReadBarrierKind::kArray: {
974 DCHECK_GE(code.size() - literal_offset, 8u);
975 uint32_t next_insn = GetInsn(literal_offset + 4u);
976 // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
977 // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
978 CheckValidReg(next_insn & 0x1fu); // Check destination register.
979 const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
980 CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
981 CheckValidReg((next_insn >> 16) & 0x1fu); // Check index register.
982 break;
983 }
984 case BakerReadBarrierKind::kGcRoot: {
985 DCHECK_GE(literal_offset, 4u);
986 uint32_t prev_insn = GetInsn(literal_offset - 4u);
Vladimir Markoca1e0382018-04-11 09:58:41 +0000987 const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
Vladimir Marko94796f82018-08-08 15:15:33 +0100988 // Usually LDR (immediate) with correct root_reg but
989 // we may have a "MOV marked, old_value" for UnsafeCASObject.
990 if ((prev_insn & 0xffe0ffff) != (0x2a0003e0 | root_reg)) { // MOV?
991 CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg); // LDR?
992 }
Vladimir Markoca1e0382018-04-11 09:58:41 +0000993 break;
994 }
995 default:
996 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
997 UNREACHABLE();
998 }
999 }
1000 }
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001001}
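// Illustrative sketch, not part of the original source: how the kField
// verification above pins down an encoding. Assuming, for the example only,
// base_reg == 5, the expected pattern is
//   0xb9400000u | (5u << 5) == 0xb94000a0u,
// and the mask 0xffc003e0u keeps only the LDR-immediate opcode bits [31:22]
// and the base register Rn in bits [9:5]; the imm12 offset (bits [21:10]) and
// the destination Rt (bits [4:0]) are deliberately left unconstrained, with Rt
// validated separately by CheckValidReg(next_insn & 0x1fu).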
1002
Zheng Xuad4450e2015-04-17 18:48:56 +08001003void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1004 // Note: There are 6 kinds of moves:
1005 // 1. constant -> GPR/FPR (non-cycle)
1006 // 2. constant -> stack (non-cycle)
1007 // 3. GPR/FPR -> GPR/FPR
1008 // 4. GPR/FPR -> stack
1009 // 5. stack -> GPR/FPR
1010 // 6. stack -> stack (non-cycle)
1011 // Case 1, 2 and 6 should never be included in a dependency cycle on ARM64. For case 3, 4, and 5
1012 // VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no intersecting
1013 // cycles on ARM64, so we always have 1 GPR and 1 FPR VIXL temp available to resolve the
1014 // dependency.
1015 vixl_temps_.Open(GetVIXLAssembler());
1016}
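// Illustrative sketch, not part of the original source: a dependency cycle
// formed by cases 4 and 5 (e.g. swapping w0 with the stack slot [sp, #16]) is
// broken with the single GPR temp mentioned above:
//   tmp = [sp, #16]   // load the blocked value into the VIXL scratch
//   [sp, #16] = w0    // the slot is now free to receive the register
//   w0 = tmp          // complete the swap
// An FPR/stack cycle is resolved the same way with the single FPR temp (d31).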
1017
1018void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1019 vixl_temps_.Close();
1020}
1021
1022Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
Artem Serovd4bccf12017-04-03 18:47:32 +01001023 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
1024 || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
1025 || kind == Location::kSIMDStackSlot);
1026 kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
1027 ? Location::kFpuRegister
1028 : Location::kRegister;
Zheng Xuad4450e2015-04-17 18:48:56 +08001029 Location scratch = GetScratchLocation(kind);
1030 if (!scratch.Equals(Location::NoLocation())) {
1031 return scratch;
1032 }
1033 // Allocate from VIXL temp registers.
1034 if (kind == Location::kRegister) {
1035 scratch = LocationFrom(vixl_temps_.AcquireX());
1036 } else {
Roland Levillain952b2352017-05-03 19:49:14 +01001037 DCHECK_EQ(kind, Location::kFpuRegister);
Artem Serovd4bccf12017-04-03 18:47:32 +01001038 scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
1039 ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
1040 : vixl_temps_.AcquireD());
Zheng Xuad4450e2015-04-17 18:48:56 +08001041 }
1042 AddScratchLocation(scratch);
1043 return scratch;
1044}
1045
1046void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1047 if (loc.IsRegister()) {
1048 vixl_temps_.Release(XRegisterFrom(loc));
1049 } else {
1050 DCHECK(loc.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001051 vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
Zheng Xuad4450e2015-04-17 18:48:56 +08001052 }
1053 RemoveScratchLocation(loc);
1054}
1055
Alexandre Rames3e69f162014-12-10 10:36:50 +00001056void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001057 MoveOperands* move = moves_[index];
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001058 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001059}
1060
Alexandre Rames5319def2014-10-23 10:03:10 +01001061void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001062 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001063 __ Bind(&frame_entry_label_);
1064
Nicolas Geoffray8d728322018-01-18 22:44:32 +00001065 if (GetCompilerOptions().CountHotnessInCompiledCode()) {
1066 UseScratchRegisterScope temps(masm);
1067 Register temp = temps.AcquireX();
1068 __ Ldrh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1069 __ Add(temp, temp, 1);
1070 __ Strh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
1071 }
1072
Vladimir Marko33bff252017-11-01 14:35:42 +00001073 bool do_overflow_check =
1074 FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001075 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001076 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001077 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001078 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Vladimir Marko33bff252017-11-01 14:35:42 +00001079 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001080 {
1081 // Ensure that between load and RecordPcInfo there are no pools emitted.
1082 ExactAssemblyScope eas(GetVIXLAssembler(),
1083 kInstructionSize,
1084 CodeBufferCheckScope::kExactSize);
1085 __ ldr(wzr, MemOperand(temp, 0));
1086 RecordPcInfo(nullptr, 0);
1087 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001088 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001089
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001090 if (!HasEmptyFrame()) {
1091 int frame_size = GetFrameSize();
1092 // Stack layout:
1093 // sp[frame_size - 8] : lr.
1094 // ... : other preserved core registers.
1095 // ... : other preserved fp registers.
1096 // ... : reserved frame space.
1097 // sp[0] : current method.
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001098
1099 // Save the current method if we need it. Note that we do not
1100 // do this in HCurrentMethod, as the instruction might have been removed
1101 // in the SSA graph.
1102 if (RequiresCurrentMethod()) {
1103 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001104 } else {
1105 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001106 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001107 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001108 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1109 frame_size - GetCoreSpillSize());
1110 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1111 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001112
1113 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1114 // Initialize should_deoptimize flag to 0.
1115 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1116 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1117 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001118 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01001119
1120 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01001121}
1122
1123void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001124 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001125 if (!HasEmptyFrame()) {
1126 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001127 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1128 frame_size - FrameEntrySpillSize());
1129 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1130 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001131 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001132 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001133 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001134 __ Ret();
1135 GetAssembler()->cfi().RestoreState();
1136 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001137}
1138
Scott Wakeling97c72b72016-06-24 16:19:36 +01001139CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001140 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001141 return CPURegList(CPURegister::kRegister, kXRegSize,
1142 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001143}
1144
Scott Wakeling97c72b72016-06-24 16:19:36 +01001145CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001146 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1147 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001148 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1149 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001150}
1151
Alexandre Rames5319def2014-10-23 10:03:10 +01001152void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1153 __ Bind(GetLabelOf(block));
1154}
1155
Calin Juravle175dc732015-08-25 15:42:32 +01001156void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1157 DCHECK(location.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001158 __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
Calin Juravle175dc732015-08-25 15:42:32 +01001159}
1160
Calin Juravlee460d1d2015-09-29 04:52:17 +01001161void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1162 if (location.IsRegister()) {
1163 locations->AddTemp(location);
1164 } else {
1165 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1166 }
1167}
1168
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001169void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001170 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001171 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001172 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001173 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001174 if (value_can_be_null) {
1175 __ Cbz(value, &done);
1176 }
Roland Levillainc73f0522018-08-14 15:16:50 +01001177 // Load the address of the card table into `card`.
Andreas Gampe542451c2016-07-26 09:02:02 -07001178 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Roland Levillainc73f0522018-08-14 15:16:50 +01001179 // Calculate the offset (in the card table) of the card corresponding to
1180 // `object`.
Alexandre Rames5319def2014-10-23 10:03:10 +01001181 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Roland Levillainc73f0522018-08-14 15:16:50 +01001182 // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
1183 // `object`'s card.
1184 //
1185 // Register `card` contains the address of the card table. Note that the card
1186 // table's base is biased during its creation so that it always starts at an
1187 // address whose least-significant byte is equal to `kCardDirty` (see
1188 // art::gc::accounting::CardTable::Create). Therefore the STRB instruction
1189 // below writes the `kCardDirty` (byte) value into the `object`'s card
1190 // (located at `card + object >> kCardShift`).
1191 //
1192 // This dual use of the value in register `card` (1. to calculate the location
1193 // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
1194 // (no need to explicitly load `kCardDirty` as an immediate value).
Serban Constantinescu02164b32014-11-13 14:05:07 +00001195 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001196 if (value_can_be_null) {
1197 __ Bind(&done);
1198 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001199}
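// Illustrative sketch, not part of the original source, restating the sequence
// above in pseudo-C:
//   temp = object >> kCardShift;      // index of `object`'s card in the table
//   card = biased_card_table_base;    // low byte == kCardDirty by construction
//   *(card + temp) = (uint8_t)card;   // STRB stores the low byte, i.e. kCardDirty
// No separate immediate load of kCardDirty is needed, which is the point of the
// biased card table base described above.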
1200
David Brazdil58282f42016-01-14 12:45:10 +00001201void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001202 // Blocked core registers:
1203 // lr : Runtime reserved.
1204 // tr : Runtime reserved.
Roland Levillain97c46462017-05-11 14:04:03 +01001205 // mr : Runtime reserved.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001206 // ip1 : VIXL core temp.
1207 // ip0 : VIXL core temp.
1208 //
1209 // Blocked fp registers:
1210 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001211 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1212 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001213 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001214 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001215 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001216
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001217 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001218 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001219 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001220 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001221
David Brazdil58282f42016-01-14 12:45:10 +00001222 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001223 // Stubs do not save callee-save floating point registers. If the graph
1224 // is debuggable, we need to deal with these registers differently. For
1225 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001226 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1227 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001228 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001229 }
1230 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001231}
1232
Alexandre Rames3e69f162014-12-10 10:36:50 +00001233size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1234 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1235 __ Str(reg, MemOperand(sp, stack_index));
1236 return kArm64WordSize;
1237}
1238
1239size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1240 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1241 __ Ldr(reg, MemOperand(sp, stack_index));
1242 return kArm64WordSize;
1243}
1244
1245size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1246 FPRegister reg = FPRegister(reg_id, kDRegSize);
1247 __ Str(reg, MemOperand(sp, stack_index));
1248 return kArm64WordSize;
1249}
1250
1251size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1252 FPRegister reg = FPRegister(reg_id, kDRegSize);
1253 __ Ldr(reg, MemOperand(sp, stack_index));
1254 return kArm64WordSize;
1255}
1256
Alexandre Rames5319def2014-10-23 10:03:10 +01001257void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001258 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001259}
1260
1261void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001262 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001263}
1264
Vladimir Markoa0431112018-06-25 09:32:54 +01001265const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
1266 return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
1267}
1268
Alexandre Rames67555f72014-11-18 10:55:16 +00001269void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001270 if (constant->IsIntConstant()) {
1271 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1272 } else if (constant->IsLongConstant()) {
1273 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1274 } else if (constant->IsNullConstant()) {
1275 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001276 } else if (constant->IsFloatConstant()) {
1277 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1278 } else {
1279 DCHECK(constant->IsDoubleConstant());
1280 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1281 }
1282}
1283
Alexandre Rames3e69f162014-12-10 10:36:50 +00001284
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001285static bool CoherentConstantAndType(Location constant, DataType::Type type) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001286 DCHECK(constant.IsConstant());
1287 HConstant* cst = constant.GetConstant();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001288 return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001289 // Null is mapped to a core W register, which we associate with kPrimInt.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001290 (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
1291 (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
1292 (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
1293 (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001294}
1295
Roland Levillain952b2352017-05-03 19:49:14 +01001296// Allocate a scratch register from the VIXL pool, querying first
1297// the floating-point register pool, and then the core register
1298// pool. This is essentially a reimplementation of
Roland Levillain558dea12017-01-27 19:40:44 +00001299// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1300// using a different allocation strategy.
1301static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1302 vixl::aarch64::UseScratchRegisterScope* temps,
1303 int size_in_bits) {
1304 return masm->GetScratchFPRegisterList()->IsEmpty()
1305 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1306 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1307}
1308
Calin Juravlee460d1d2015-09-29 04:52:17 +01001309void CodeGeneratorARM64::MoveLocation(Location destination,
1310 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001311 DataType::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001312 if (source.Equals(destination)) {
1313 return;
1314 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001315
1316 // A valid move can always be inferred from the destination and source
1317 // locations. When moving from and to a register, the argument type can be
1318 // used to generate 32bit instead of 64bit moves. In debug mode we also
1319 // check the coherency of the locations and the type.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001320 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001321
1322 if (destination.IsRegister() || destination.IsFpuRegister()) {
1323 if (unspecified_type) {
1324 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1325 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001326 (src_cst != nullptr && (src_cst->IsIntConstant()
1327 || src_cst->IsFloatConstant()
1328 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001329 // For stack slots and 32bit constants, a 32bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001330 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexandre Rames67555f72014-11-18 10:55:16 +00001331 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001332 // If the source is a double stack slot or a 64bit constant, a 64bit
1333 // type is appropriate. Else the source is a register, and since the
1334 // type has not been specified, we choose a 64bit type to force a 64bit
1335 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001336 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexandre Rames67555f72014-11-18 10:55:16 +00001337 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001338 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001339 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1340 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001341 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001342 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1343 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1344 __ Ldr(dst, StackOperandFrom(source));
Artem Serovd4bccf12017-04-03 18:47:32 +01001345 } else if (source.IsSIMDStackSlot()) {
1346 __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001347 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001348 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001349 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001350 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001351 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001352 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001353 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001354 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001355 DataType::Type source_type = DataType::Is64BitType(dst_type)
1356 ? DataType::Type::kInt64
1357 : DataType::Type::kInt32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001358 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1359 }
1360 } else {
1361 DCHECK(source.IsFpuRegister());
1362 if (destination.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001363 DataType::Type source_type = DataType::Is64BitType(dst_type)
1364 ? DataType::Type::kFloat64
1365 : DataType::Type::kFloat32;
Calin Juravlee460d1d2015-09-29 04:52:17 +01001366 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1367 } else {
1368 DCHECK(destination.IsFpuRegister());
Artem Serovd4bccf12017-04-03 18:47:32 +01001369 if (GetGraph()->HasSIMD()) {
1370 __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
1371 } else {
1372 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
1373 }
1374 }
1375 }
1376 } else if (destination.IsSIMDStackSlot()) {
1377 if (source.IsFpuRegister()) {
1378 __ Str(QRegisterFrom(source), StackOperandFrom(destination));
1379 } else {
1380 DCHECK(source.IsSIMDStackSlot());
1381 UseScratchRegisterScope temps(GetVIXLAssembler());
1382 if (GetVIXLAssembler()->GetScratchFPRegisterList()->IsEmpty()) {
1383 Register temp = temps.AcquireX();
1384 __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
1385 __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
1386 __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
1387 __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
1388 } else {
1389 FPRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
1390 __ Ldr(temp, StackOperandFrom(source));
1391 __ Str(temp, StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001392 }
1393 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001394 } else { // The destination is not a register. It must be a stack slot.
1395 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1396 if (source.IsRegister() || source.IsFpuRegister()) {
1397 if (unspecified_type) {
1398 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001399 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001400 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001401 dst_type =
1402 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001403 }
1404 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001405 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1406 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001407 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001408 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001409 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1410 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001411 UseScratchRegisterScope temps(GetVIXLAssembler());
1412 HConstant* src_cst = source.GetConstant();
1413 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001414 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001415 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1416 ? Register(xzr)
1417 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001418 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001419 if (src_cst->IsIntConstant()) {
1420 temp = temps.AcquireW();
1421 } else if (src_cst->IsLongConstant()) {
1422 temp = temps.AcquireX();
1423 } else if (src_cst->IsFloatConstant()) {
1424 temp = temps.AcquireS();
1425 } else {
1426 DCHECK(src_cst->IsDoubleConstant());
1427 temp = temps.AcquireD();
1428 }
1429 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001430 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001431 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001432 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001433 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001434 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001435 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001436 // Use any scratch register (a core or a floating-point one)
1437 // from VIXL scratch register pools as a temporary.
1438 //
1439 // We used to only use the FP scratch register pool, but in some
1440 // rare cases the only register from this pool (D31) would
1441 // already be used (e.g. within a ParallelMove instruction, when
1442 // a move is blocked by another move requiring a scratch FP
1443 // register, which would reserve D31). To prevent this issue, we
1444 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001445 //
1446 // Also, we ask for an FP scratch register first, as the
Roland Levillain952b2352017-05-03 19:49:14 +01001447 // demand for scratch core registers is higher. This is why we
Roland Levillain558dea12017-01-27 19:40:44 +00001448 // use AcquireFPOrCoreCPURegisterOfSize instead of
1449 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1450 // allocates core scratch registers first.
1451 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1452 GetVIXLAssembler(),
1453 &temps,
1454 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001455 __ Ldr(temp, StackOperandFrom(source));
1456 __ Str(temp, StackOperandFrom(destination));
1457 }
1458 }
1459}
1460
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001461void CodeGeneratorARM64::Load(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001462 CPURegister dst,
1463 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001464 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001465 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001466 case DataType::Type::kUint8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001467 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001468 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001469 case DataType::Type::kInt8:
Alexandre Rames67555f72014-11-18 10:55:16 +00001470 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001471 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001472 case DataType::Type::kUint16:
Alexandre Rames67555f72014-11-18 10:55:16 +00001473 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001474 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001475 case DataType::Type::kInt16:
1476 __ Ldrsh(Register(dst), src);
1477 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001478 case DataType::Type::kInt32:
1479 case DataType::Type::kReference:
1480 case DataType::Type::kInt64:
1481 case DataType::Type::kFloat32:
1482 case DataType::Type::kFloat64:
1483 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001484 __ Ldr(dst, src);
1485 break;
Aart Bik66c158e2018-01-31 12:55:04 -08001486 case DataType::Type::kUint32:
1487 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001488 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001489 LOG(FATAL) << "Unreachable type " << type;
1490 }
1491}
1492
Calin Juravle77520bc2015-01-12 18:45:46 +00001493void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001494 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001495 const MemOperand& src,
1496 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001497 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001498 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001499 Register temp_base = temps.AcquireX();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001500 DataType::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001501
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001502 DCHECK(!src.IsPreIndex());
1503 DCHECK(!src.IsPostIndex());
1504
1505 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001506 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001507 {
1508 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1509 MemOperand base = MemOperand(temp_base);
1510 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001511 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001512 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001513 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00001514 {
1515 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1516 __ ldarb(Register(dst), base);
1517 if (needs_null_check) {
1518 MaybeRecordImplicitNullCheck(instruction);
1519 }
1520 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001521 if (type == DataType::Type::kInt8) {
1522 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
Artem Serov914d7a82017-02-07 14:33:49 +00001523 }
1524 break;
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001525 case DataType::Type::kUint16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001526 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00001527 {
1528 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1529 __ ldarh(Register(dst), base);
1530 if (needs_null_check) {
1531 MaybeRecordImplicitNullCheck(instruction);
1532 }
1533 }
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001534 if (type == DataType::Type::kInt16) {
1535 __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
1536 }
Artem Serov914d7a82017-02-07 14:33:49 +00001537 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001538 case DataType::Type::kInt32:
1539 case DataType::Type::kReference:
1540 case DataType::Type::kInt64:
1541 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001542 {
1543 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1544 __ ldar(Register(dst), base);
1545 if (needs_null_check) {
1546 MaybeRecordImplicitNullCheck(instruction);
1547 }
1548 }
1549 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001550 case DataType::Type::kFloat32:
1551 case DataType::Type::kFloat64: {
Artem Serov914d7a82017-02-07 14:33:49 +00001552 DCHECK(dst.IsFPRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001553 DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001554
Artem Serov914d7a82017-02-07 14:33:49 +00001555 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1556 {
1557 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1558 __ ldar(temp, base);
1559 if (needs_null_check) {
1560 MaybeRecordImplicitNullCheck(instruction);
1561 }
1562 }
1563 __ Fmov(FPRegister(dst), temp);
1564 break;
Roland Levillain44015862016-01-22 11:47:17 +00001565 }
Aart Bik66c158e2018-01-31 12:55:04 -08001566 case DataType::Type::kUint32:
1567 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001568 case DataType::Type::kVoid:
Artem Serov914d7a82017-02-07 14:33:49 +00001569 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001570 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001571 }
1572}
1573
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001574void CodeGeneratorARM64::Store(DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001575 CPURegister src,
1576 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001577 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001578 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001579 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001580 case DataType::Type::kInt8:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001581 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001582 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001583 case DataType::Type::kUint16:
1584 case DataType::Type::kInt16:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001585 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001586 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001587 case DataType::Type::kInt32:
1588 case DataType::Type::kReference:
1589 case DataType::Type::kInt64:
1590 case DataType::Type::kFloat32:
1591 case DataType::Type::kFloat64:
1592 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001593 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001594 break;
Aart Bik66c158e2018-01-31 12:55:04 -08001595 case DataType::Type::kUint32:
1596 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001597 case DataType::Type::kVoid:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001598 LOG(FATAL) << "Unreachable type " << type;
1599 }
1600}
1601
Artem Serov914d7a82017-02-07 14:33:49 +00001602void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001603 DataType::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001604 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00001605 const MemOperand& dst,
1606 bool needs_null_check) {
1607 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001608 UseScratchRegisterScope temps(GetVIXLAssembler());
1609 Register temp_base = temps.AcquireX();
1610
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001611 DCHECK(!dst.IsPreIndex());
1612 DCHECK(!dst.IsPostIndex());
1613
1614 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001615 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001616 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001617 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00001618 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001619 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001620 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01001621 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001622 case DataType::Type::kInt8:
Artem Serov914d7a82017-02-07 14:33:49 +00001623 {
1624 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1625 __ stlrb(Register(src), base);
1626 if (needs_null_check) {
1627 MaybeRecordImplicitNullCheck(instruction);
1628 }
1629 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001630 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001631 case DataType::Type::kUint16:
1632 case DataType::Type::kInt16:
Artem Serov914d7a82017-02-07 14:33:49 +00001633 {
1634 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1635 __ stlrh(Register(src), base);
1636 if (needs_null_check) {
1637 MaybeRecordImplicitNullCheck(instruction);
1638 }
1639 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001640 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001641 case DataType::Type::kInt32:
1642 case DataType::Type::kReference:
1643 case DataType::Type::kInt64:
1644 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001645 {
1646 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1647 __ stlr(Register(src), base);
1648 if (needs_null_check) {
1649 MaybeRecordImplicitNullCheck(instruction);
1650 }
1651 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001652 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001653 case DataType::Type::kFloat32:
1654 case DataType::Type::kFloat64: {
1655 DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001656 Register temp_src;
1657 if (src.IsZero()) {
1658 // The zero register is used to avoid synthesizing zero constants.
1659 temp_src = Register(src);
1660 } else {
1661 DCHECK(src.IsFPRegister());
1662 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1663 __ Fmov(temp_src, FPRegister(src));
1664 }
Artem Serov914d7a82017-02-07 14:33:49 +00001665 {
1666 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1667 __ stlr(temp_src, base);
1668 if (needs_null_check) {
1669 MaybeRecordImplicitNullCheck(instruction);
1670 }
1671 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001672 break;
1673 }
Aart Bik66c158e2018-01-31 12:55:04 -08001674 case DataType::Type::kUint32:
1675 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001676 case DataType::Type::kVoid:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001677 LOG(FATAL) << "Unreachable type " << type;
1678 }
1679}
1680
Calin Juravle175dc732015-08-25 15:42:32 +01001681void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1682 HInstruction* instruction,
1683 uint32_t dex_pc,
1684 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001685 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00001686
1687 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
1688 {
1689 // Ensure the pc position is recorded immediately after the `blr` instruction.
1690 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
1691 __ blr(lr);
1692 if (EntrypointRequiresStackMap(entrypoint)) {
1693 RecordPcInfo(instruction, dex_pc, slow_path);
1694 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001695 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001696}
1697
Roland Levillaindec8f632016-07-22 17:10:06 +01001698void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1699 HInstruction* instruction,
1700 SlowPathCode* slow_path) {
1701 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01001702 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1703 __ Blr(lr);
1704}
1705
Alexandre Rames67555f72014-11-18 10:55:16 +00001706void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001707 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001708 UseScratchRegisterScope temps(GetVIXLAssembler());
1709 Register temp = temps.AcquireW();
Vladimir Markodc682aa2018-01-04 18:42:57 +00001710 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
1711 const size_t status_byte_offset =
1712 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
1713 constexpr uint32_t shifted_initialized_value =
1714 enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001715
Serban Constantinescu02164b32014-11-13 14:05:07 +00001716 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001717 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Vladimir Markodc682aa2018-01-04 18:42:57 +00001718 __ Add(temp, class_reg, status_byte_offset);
Igor Murashkin86083f72017-10-27 10:59:04 -07001719 __ Ldarb(temp, HeapOperand(temp));
Vladimir Markodc682aa2018-01-04 18:42:57 +00001720 __ Cmp(temp, shifted_initialized_value);
Vladimir Marko2c64a832018-01-04 11:31:56 +00001721 __ B(lo, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001722 __ Bind(slow_path->GetExitLabel());
1723}
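// Illustrative sketch, not part of the original source, assuming purely for
// the example that SubtypeCheckBits::BitStructSizeOf() == 12:
//   status_byte_offset        = StatusOffset() + 12 / 8  = StatusOffset() + 1
//   shifted_initialized_value = kInitialized << (12 % 8) = kInitialized << 4
// The Ldarb above then reads the byte containing the status bits with acquire
// semantics, and B(lo, ...) takes the slow path whenever the class status is
// still below ClassStatus::kInitialized.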
Alexandre Rames5319def2014-10-23 10:03:10 +01001724
Vladimir Marko175e7862018-03-27 09:03:13 +00001725void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
1726 HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
1727 uint32_t path_to_root = check->GetBitstringPathToRoot();
1728 uint32_t mask = check->GetBitstringMask();
1729 DCHECK(IsPowerOfTwo(mask + 1));
1730 size_t mask_bits = WhichPowerOf2(mask + 1);
1731
1732 if (mask_bits == 16u) {
1733 // Load only the bitstring part of the status word.
1734 __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
1735 } else {
1736 // /* uint32_t */ temp = temp->status_
1737 __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
1738 // Extract the bitstring bits.
1739 __ Ubfx(temp, temp, 0, mask_bits);
1740 }
1741 // Compare the bitstring bits to `path_to_root`.
1742 __ Cmp(temp, path_to_root);
1743}
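// Illustrative sketch, not part of the original source: with a hypothetical
// bitstring mask of 0xffff (so mask_bits == 16) and path_to_root == 0x1234,
// the code above loads only the low 16 bits of the status word (the Ldrh
// branch) and the final Cmp sets the flags so that the caller's equality
// branch succeeds exactly when those bits equal 0x1234, i.e. when the checked
// class lies on the path encoded by the target class's bitstring.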
1744
Roland Levillain44015862016-01-22 11:47:17 +00001745void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001746 BarrierType type = BarrierAll;
1747
1748 switch (kind) {
1749 case MemBarrierKind::kAnyAny:
1750 case MemBarrierKind::kAnyStore: {
1751 type = BarrierAll;
1752 break;
1753 }
1754 case MemBarrierKind::kLoadAny: {
1755 type = BarrierReads;
1756 break;
1757 }
1758 case MemBarrierKind::kStoreStore: {
1759 type = BarrierWrites;
1760 break;
1761 }
1762 default:
1763 LOG(FATAL) << "Unexpected memory barrier " << kind;
1764 }
1765 __ Dmb(InnerShareable, type);
1766}
1767
Serban Constantinescu02164b32014-11-13 14:05:07 +00001768void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1769 HBasicBlock* successor) {
1770 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001771 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1772 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001773 slow_path =
1774 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001775 instruction->SetSlowPath(slow_path);
1776 codegen_->AddSlowPath(slow_path);
1777 if (successor != nullptr) {
1778 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001779 }
1780 } else {
1781 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1782 }
1783
Serban Constantinescu02164b32014-11-13 14:05:07 +00001784 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1785 Register temp = temps.AcquireW();
1786
Andreas Gampe542451c2016-07-26 09:02:02 -07001787 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001788 if (successor == nullptr) {
1789 __ Cbnz(temp, slow_path->GetEntryLabel());
1790 __ Bind(slow_path->GetReturnLabel());
1791 } else {
1792 __ Cbz(temp, codegen_->GetLabelOf(successor));
1793 __ B(slow_path->GetEntryLabel());
1794 // slow_path will return to GetLabelOf(successor).
1795 }
1796}
1797
Alexandre Rames5319def2014-10-23 10:03:10 +01001798InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1799 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001800 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001801 assembler_(codegen->GetAssembler()),
1802 codegen_(codegen) {}
1803
Alexandre Rames67555f72014-11-18 10:55:16 +00001804void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001805 DCHECK_EQ(instr->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001806 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001807 DataType::Type type = instr->GetResultType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001808 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001809 case DataType::Type::kInt32:
1810 case DataType::Type::kInt64:
Alexandre Rames5319def2014-10-23 10:03:10 +01001811 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001812 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001813 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001814 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001815
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001816 case DataType::Type::kFloat32:
1817 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001818 locations->SetInAt(0, Location::RequiresFpuRegister());
1819 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001820 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001821 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001822
Alexandre Rames5319def2014-10-23 10:03:10 +01001823 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001824 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001825 }
1826}
1827
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001828void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
1829 const FieldInfo& field_info) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001830 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1831
1832 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001833 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Rames09a99962015-04-15 11:47:56 +01001834 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001835 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
1836 object_field_get_with_read_barrier
1837 ? LocationSummary::kCallOnSlowPath
1838 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01001839 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001840 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko0ecac682018-08-07 10:40:38 +01001841 // We need a temporary register for the read barrier load in
1842 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
1843 // only if the field is volatile or the offset is too big.
1844 if (field_info.IsVolatile() ||
1845 field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
1846 locations->AddTemp(FixedTempLocation());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001847 }
Vladimir Marko70e97462016-08-09 11:04:26 +01001848 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001849 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001850 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001851 locations->SetOut(Location::RequiresFpuRegister());
1852 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001853 // The output overlaps for an object field get when read barriers
1854 // are enabled: we do not want the load to overwrite the object's
1855 // location, as we need it to emit the read barrier.
1856 locations->SetOut(
1857 Location::RequiresRegister(),
1858 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01001859 }
1860}
1861
1862void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
1863 const FieldInfo& field_info) {
1864 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00001865 LocationSummary* locations = instruction->GetLocations();
1866 Location base_loc = locations->InAt(0);
1867 Location out = locations->Out();
1868 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Vladimir Marko61b92282017-10-11 13:23:17 +01001869 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
1870 DataType::Type load_type = instruction->GetType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001871 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01001872
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001873 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
Vladimir Marko61b92282017-10-11 13:23:17 +01001874 load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00001875 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00001876 // /* HeapReference<Object> */ out = *(base + offset)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001877 Register base = RegisterFrom(base_loc, DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001878 Location maybe_temp =
1879 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
Roland Levillain44015862016-01-22 11:47:17 +00001880 // Note that potential implicit null checks are handled in this
1881 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
1882 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1883 instruction,
1884 out,
1885 base,
1886 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00001887 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00001888 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001889 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00001890 } else {
1891 // General case.
1892 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001893 // Note that a potential implicit null check is handled in this
1894 // CodeGeneratorARM64::LoadAcquire call.
1895 // NB: LoadAcquire will record the pc info if needed.
1896 codegen_->LoadAcquire(
1897 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01001898 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00001899 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1900 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Vladimir Marko61b92282017-10-11 13:23:17 +01001901 codegen_->Load(load_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01001902 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01001903 }
Vladimir Marko61b92282017-10-11 13:23:17 +01001904 if (load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00001905 // If read barriers are enabled, emit read barriers other than
1906 // Baker's using a slow path (and also unpoison the loaded
1907 // reference, if heap poisoning is enabled).
1908 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
1909 }
Roland Levillain4d027112015-07-01 15:41:14 +01001910 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001911}
1912
1913void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
1914 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01001915 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01001916 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001917 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
1918 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001919 } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001920 locations->SetInAt(1, Location::RequiresFpuRegister());
1921 } else {
1922 locations->SetInAt(1, Location::RequiresRegister());
1923 }
1924}
1925
1926void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001927 const FieldInfo& field_info,
1928 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01001929 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
1930
1931 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001932 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01001933 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01001934 Offset offset = field_info.GetFieldOffset();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001935 DataType::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01001936
Roland Levillain4d027112015-07-01 15:41:14 +01001937 {
1938 // We use a block to end the scratch scope before the write barrier, thus
1939 // freeing the temporary registers so they can be used in `MarkGCCard`.
1940 UseScratchRegisterScope temps(GetVIXLAssembler());
1941
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001942 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01001943 DCHECK(value.IsW());
1944 Register temp = temps.AcquireW();
1945 __ Mov(temp, value.W());
1946 GetAssembler()->PoisonHeapReference(temp.W());
1947 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01001948 }
Roland Levillain4d027112015-07-01 15:41:14 +01001949
1950 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00001951 codegen_->StoreRelease(
1952 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01001953 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00001954 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
1955 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01001956 codegen_->Store(field_type, source, HeapOperand(obj, offset));
1957 codegen_->MaybeRecordImplicitNullCheck(instruction);
1958 }
Alexandre Rames09a99962015-04-15 11:47:56 +01001959 }
1960
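// The card table write barrier below is only needed for reference stores; when
// `value_can_be_null` is true, MarkGCCard also emits a run-time null check so that storing
// null does not dirty a card.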
1961 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001962 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01001963 }
1964}
1965
Alexandre Rames67555f72014-11-18 10:55:16 +00001966void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001967 DataType::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001968
1969 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001970 case DataType::Type::kInt32:
1971 case DataType::Type::kInt64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001972 Register dst = OutputRegister(instr);
1973 Register lhs = InputRegisterAt(instr, 0);
1974 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001975 if (instr->IsAdd()) {
1976 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001977 } else if (instr->IsAnd()) {
1978 __ And(dst, lhs, rhs);
1979 } else if (instr->IsOr()) {
1980 __ Orr(dst, lhs, rhs);
1981 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001982 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001983 } else if (instr->IsRor()) {
1984 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001985 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00001986 __ Ror(dst, lhs, shift);
1987 } else {
1988 // Ensure shift distance is in the same size register as the result. If
1989 // we are rotating a long and the shift comes in a w register originally,
1990 // we don't need to sxtw it for use as an x, since the shift distance is
1991 // always masked with (reg_bits - 1).
1992 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
1993 }
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01001994 } else if (instr->IsMin() || instr->IsMax()) {
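// Branchless min/max: Csel keeps `lhs` when the condition set by the compare holds
// (lt for Min, gt for Max) and selects `rhs` otherwise.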
1995 __ Cmp(lhs, rhs);
1996 __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
Alexandre Rames67555f72014-11-18 10:55:16 +00001997 } else {
1998 DCHECK(instr->IsXor());
1999 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002000 }
2001 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002002 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002003 case DataType::Type::kFloat32:
2004 case DataType::Type::kFloat64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002005 FPRegister dst = OutputFPRegister(instr);
2006 FPRegister lhs = InputFPRegisterAt(instr, 0);
2007 FPRegister rhs = InputFPRegisterAt(instr, 1);
2008 if (instr->IsAdd()) {
2009 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002010 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002011 __ Fsub(dst, lhs, rhs);
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002012 } else if (instr->IsMin()) {
2013 __ Fmin(dst, lhs, rhs);
2014 } else if (instr->IsMax()) {
2015 __ Fmax(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002016 } else {
2017 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002018 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002019 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002020 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002021 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002022 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002023 }
2024}
2025
Serban Constantinescu02164b32014-11-13 14:05:07 +00002026void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2027 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2028
Vladimir Markoca6fff82017-10-03 14:49:14 +01002029 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002030 DataType::Type type = instr->GetResultType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002031 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002032 case DataType::Type::kInt32:
2033 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002034 locations->SetInAt(0, Location::RequiresRegister());
2035 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Artem Serov87c97052016-09-23 13:34:31 +01002036 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002037 break;
2038 }
2039 default:
2040 LOG(FATAL) << "Unexpected shift type " << type;
2041 }
2042}
2043
2044void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2045 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2046
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002047 DataType::Type type = instr->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002048 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002049 case DataType::Type::kInt32:
2050 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002051 Register dst = OutputRegister(instr);
2052 Register lhs = InputRegisterAt(instr, 0);
2053 Operand rhs = InputOperandAt(instr, 1);
2054 if (rhs.IsImmediate()) {
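// Only the low 5 bits (int) or 6 bits (long) of the shift amount are significant, matching
// Java shift semantics; kMaxIntShiftDistance and kMaxLongShiftDistance are the corresponding
// masks.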
Scott Wakeling97c72b72016-06-24 16:19:36 +01002055 uint32_t shift_value = rhs.GetImmediate() &
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002056 (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002057 if (instr->IsShl()) {
2058 __ Lsl(dst, lhs, shift_value);
2059 } else if (instr->IsShr()) {
2060 __ Asr(dst, lhs, shift_value);
2061 } else {
2062 __ Lsr(dst, lhs, shift_value);
2063 }
2064 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002065 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002066
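// For register shifts the hardware itself uses only the low 5 (W) or 6 (X) bits of the
// shift amount, so no explicit masking is needed to match Java semantics.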
2067 if (instr->IsShl()) {
2068 __ Lsl(dst, lhs, rhs_reg);
2069 } else if (instr->IsShr()) {
2070 __ Asr(dst, lhs, rhs_reg);
2071 } else {
2072 __ Lsr(dst, lhs, rhs_reg);
2073 }
2074 }
2075 break;
2076 }
2077 default:
2078 LOG(FATAL) << "Unexpected shift operation type " << type;
2079 }
2080}
2081
Alexandre Rames5319def2014-10-23 10:03:10 +01002082void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002083 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002084}
2085
2086void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002087 HandleBinaryOp(instruction);
2088}
2089
2090void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2091 HandleBinaryOp(instruction);
2092}
2093
2094void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2095 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002096}
2097
Artem Serov7fc63502016-02-09 17:15:29 +00002098void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002099 DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002100 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002101 locations->SetInAt(0, Location::RequiresRegister());
2102 // There is no immediate variant of negated bitwise instructions in AArch64.
2103 locations->SetInAt(1, Location::RequiresRegister());
2104 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2105}
2106
Artem Serov7fc63502016-02-09 17:15:29 +00002107void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002108 Register dst = OutputRegister(instr);
2109 Register lhs = InputRegisterAt(instr, 0);
2110 Register rhs = InputRegisterAt(instr, 1);
2111
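// Bic, Orn and Eon compute AND, ORR and EOR with the bitwise complement of the right
// operand, folding the negation into a single instruction.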
2112 switch (instr->GetOpKind()) {
2113 case HInstruction::kAnd:
2114 __ Bic(dst, lhs, rhs);
2115 break;
2116 case HInstruction::kOr:
2117 __ Orn(dst, lhs, rhs);
2118 break;
2119 case HInstruction::kXor:
2120 __ Eon(dst, lhs, rhs);
2121 break;
2122 default:
2123 LOG(FATAL) << "Unreachable";
2124 }
2125}
2126
Anton Kirilov74234da2017-01-13 14:42:47 +00002127void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2128 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002129 DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
2130 instruction->GetType() == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002131 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002132 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames8626b742015-11-25 16:28:08 +00002133 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2134 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2135 } else {
2136 locations->SetInAt(0, Location::RequiresRegister());
2137 }
2138 locations->SetInAt(1, Location::RequiresRegister());
2139 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2140}
2141
Anton Kirilov74234da2017-01-13 14:42:47 +00002142void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2143 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002144 DataType::Type type = instruction->GetType();
Alexandre Rames8626b742015-11-25 16:28:08 +00002145 HInstruction::InstructionKind kind = instruction->GetInstrKind();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002146 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002147 Register out = OutputRegister(instruction);
2148 Register left;
2149 if (kind != HInstruction::kNeg) {
2150 left = InputRegisterAt(instruction, 0);
2151 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002152 // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
Alexandre Rames8626b742015-11-25 16:28:08 +00002153 // shifter operand operation, the IR generating `right_reg` (input to the type
2154 // conversion) can have a different type from the current instruction's type,
2155 // so we manually indicate the type.
2156 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002157 Operand right_operand(0);
2158
Anton Kirilov74234da2017-01-13 14:42:47 +00002159 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2160 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002161 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2162 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002163 right_operand = Operand(right_reg,
2164 helpers::ShiftFromOpKind(op_kind),
2165 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002166 }
2167
2168 // Logical binary operations do not support extension operations in the
2169 // operand. Note that VIXL would still manage if such an operand were passed, by generating
2170 // the extension as a separate instruction.
2171 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2172 DCHECK(!right_operand.IsExtendedRegister() ||
2173 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2174 kind != HInstruction::kNeg));
2175 switch (kind) {
2176 case HInstruction::kAdd:
2177 __ Add(out, left, right_operand);
2178 break;
2179 case HInstruction::kAnd:
2180 __ And(out, left, right_operand);
2181 break;
2182 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002183 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002184 __ Neg(out, right_operand);
2185 break;
2186 case HInstruction::kOr:
2187 __ Orr(out, left, right_operand);
2188 break;
2189 case HInstruction::kSub:
2190 __ Sub(out, left, right_operand);
2191 break;
2192 case HInstruction::kXor:
2193 __ Eor(out, left, right_operand);
2194 break;
2195 default:
2196 LOG(FATAL) << "Unexpected operation kind: " << kind;
2197 UNREACHABLE();
2198 }
2199}
2200
Artem Serov328429f2016-07-06 16:23:04 +01002201void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002202 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002203 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002204 locations->SetInAt(0, Location::RequiresRegister());
2205 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
Artem Serov87c97052016-09-23 13:34:31 +01002206 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002207}
2208
Roland Levillain19c54192016-11-04 13:44:09 +00002209void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002210 __ Add(OutputRegister(instruction),
2211 InputRegisterAt(instruction, 0),
2212 Operand(InputOperandAt(instruction, 1)));
2213}
2214
Artem Serove1811ed2017-04-27 16:50:47 +01002215void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
2216 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002217 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serove1811ed2017-04-27 16:50:47 +01002218
2219 HIntConstant* shift = instruction->GetShift()->AsIntConstant();
2220
2221 locations->SetInAt(0, Location::RequiresRegister());
2222 // For the byte case we don't need to shift the index variable, so we can encode the data
2223 // offset into the ADD instruction. For other cases we prefer the data_offset to be in a
2224 // register; that hoists the data offset constant generation out of the loop and reduces the
2225 // critical path length in the loop.
2226 locations->SetInAt(1, shift->GetValue() == 0
2227 ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
2228 : Location::RequiresRegister());
2229 locations->SetInAt(2, Location::ConstantLocation(shift));
2230 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2231}
2232
2233void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
2234 HIntermediateAddressIndex* instruction) {
2235 Register index_reg = InputRegisterAt(instruction, 0);
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002236 uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
Artem Serove1811ed2017-04-27 16:50:47 +01002237 uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();
2238
2239 if (shift == 0) {
2240 __ Add(OutputRegister(instruction), index_reg, offset);
2241 } else {
2242 Register offset_reg = InputRegisterAt(instruction, 1);
2243 __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
2244 }
2245}
2246
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002247void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002248 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002249 new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002250 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2251 if (instr->GetOpKind() == HInstruction::kSub &&
2252 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002253 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002254 // Don't allocate register for Mneg instruction.
2255 } else {
2256 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2257 Location::RequiresRegister());
2258 }
2259 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2260 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002261 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2262}
2263
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002264void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002265 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002266 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2267 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002268
2269 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2270 // This fixup should be carried out for all multiply-accumulate instructions:
2271 // madd, msub, smaddl, smsubl, umaddl and umsubl.
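// The erratum concerns a 64-bit multiply-accumulate that directly follows a load or store;
// when that sequence is detected below, a single nop is inserted to separate the two.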
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002272 if (instr->GetType() == DataType::Type::kInt64 &&
Alexandre Rames418318f2015-11-20 15:55:47 +00002273 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2274 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002275 vixl::aarch64::Instruction* prev =
2276 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002277 if (prev->IsLoadOrStore()) {
2278 // Make sure we emit only exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002279 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002280 __ nop();
2281 }
2282 }
2283
2284 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002285 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002286 __ Madd(res, mul_left, mul_right, accumulator);
2287 } else {
2288 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002289 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002290 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
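// The accumulator is zero, so the result is 0 - (mul_left * mul_right); Mneg computes the
// negated product directly and no accumulator register is needed.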
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002291 __ Mneg(res, mul_left, mul_right);
2292 } else {
2293 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2294 __ Msub(res, mul_left, mul_right, accumulator);
2295 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002296 }
2297}
2298
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002299void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002300 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002301 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002302 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002303 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2304 object_array_get_with_read_barrier
2305 ? LocationSummary::kCallOnSlowPath
2306 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002307 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002308 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko008e09f32018-08-06 15:42:43 +01002309 if (instruction->GetIndex()->IsConstant()) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002310 // Array loads with constant index are treated as field loads.
Vladimir Marko008e09f32018-08-06 15:42:43 +01002311 // We need a temporary register for the read barrier load in
2312 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
2313 // only if the offset is too big.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002314 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
2315 uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002316 offset += index << DataType::SizeShift(DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002317 if (offset >= kReferenceLoadMinFarOffset) {
2318 locations->AddTemp(FixedTempLocation());
2319 }
2320 } else {
Vladimir Marko008e09f32018-08-06 15:42:43 +01002321 // We need a non-scratch temporary for the array data pointer in
2322 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier().
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002323 locations->AddTemp(Location::RequiresRegister());
2324 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002325 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002326 locations->SetInAt(0, Location::RequiresRegister());
2327 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002328 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002329 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2330 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002331 // The output overlaps in the case of an object array get with
2332 // read barriers enabled: we do not want the move to overwrite the
2333 // array's location, as we need it to emit the read barrier.
2334 locations->SetOut(
2335 Location::RequiresRegister(),
2336 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002337 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002338}
2339
2340void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002341 DataType::Type type = instruction->GetType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002342 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002343 LocationSummary* locations = instruction->GetLocations();
2344 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002345 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002346 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002347 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2348 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002349 MacroAssembler* masm = GetVIXLAssembler();
2350 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002351
Roland Levillain19c54192016-11-04 13:44:09 +00002352 // The read barrier instrumentation of object ArrayGet instructions
2353 // does not support the HIntermediateAddress instruction.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002354 DCHECK(!((type == DataType::Type::kReference) &&
Roland Levillain19c54192016-11-04 13:44:09 +00002355 instruction->GetArray()->IsIntermediateAddress() &&
2356 kEmitCompilerReadBarrier));
2357
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002358 if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00002359 // Object ArrayGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002360 // Note that a potential implicit null check is handled in the
2361 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
Vladimir Marko66d691d2017-04-07 17:53:39 +01002362 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002363 if (index.IsConstant()) {
2364 // Array load with a constant index can be treated as a field load.
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002365 offset += Int64FromLocation(index) << DataType::SizeShift(type);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002366 Location maybe_temp =
2367 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
2368 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2369 out,
2370 obj.W(),
2371 offset,
2372 maybe_temp,
Vladimir Marko66d691d2017-04-07 17:53:39 +01002373 /* needs_null_check */ false,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002374 /* use_load_acquire */ false);
2375 } else {
2376 Register temp = WRegisterFrom(locations->GetTemp(0));
2377 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko008e09f32018-08-06 15:42:43 +01002378 out, obj.W(), offset, index, temp, /* needs_null_check */ false);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002379 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002380 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002381 // General case.
2382 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002383 Register length;
2384 if (maybe_compressed_char_at) {
2385 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2386 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002387 {
2388 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2389 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2390
2391 if (instruction->GetArray()->IsIntermediateAddress()) {
2392 DCHECK_LT(count_offset, offset);
2393 int64_t adjusted_offset =
2394 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2395 // Note that `adjusted_offset` is negative, so this will be a LDUR.
2396 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2397 } else {
2398 __ Ldr(length, HeapOperand(obj, count_offset));
2399 }
2400 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002401 }
jessicahandojo05765752016-09-09 19:01:32 -07002402 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002403 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002404 if (maybe_compressed_char_at) {
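// Bit 0 of the count field is the compression flag (0 = compressed, 1 = uncompressed, per
// the static_assert below), so test it and load either an 8-bit or a 16-bit character.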
2405 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002406 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2407 "Expecting 0=compressed, 1=uncompressed");
2408 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002409 __ Ldrb(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002410 HeapOperand(obj, offset + Int64FromLocation(index)));
jessicahandojo05765752016-09-09 19:01:32 -07002411 __ B(&done);
2412 __ Bind(&uncompressed_load);
2413 __ Ldrh(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002414 HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
jessicahandojo05765752016-09-09 19:01:32 -07002415 __ Bind(&done);
2416 } else {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002417 offset += Int64FromLocation(index) << DataType::SizeShift(type);
jessicahandojo05765752016-09-09 19:01:32 -07002418 source = HeapOperand(obj, offset);
2419 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002420 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002421 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002422 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002423 // We do not need to compute the intermediate address from the array: the
2424 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002425 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002426 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002427 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002428 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2429 }
2430 temp = obj;
2431 } else {
2432 __ Add(temp, obj, offset);
2433 }
jessicahandojo05765752016-09-09 19:01:32 -07002434 if (maybe_compressed_char_at) {
2435 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002436 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2437 "Expecting 0=compressed, 1=uncompressed");
2438 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002439 __ Ldrb(Register(OutputCPURegister(instruction)),
2440 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2441 __ B(&done);
2442 __ Bind(&uncompressed_load);
2443 __ Ldrh(Register(OutputCPURegister(instruction)),
2444 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2445 __ Bind(&done);
2446 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002447 source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
jessicahandojo05765752016-09-09 19:01:32 -07002448 }
Roland Levillain44015862016-01-22 11:47:17 +00002449 }
jessicahandojo05765752016-09-09 19:01:32 -07002450 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002451 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2452 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002453 codegen_->Load(type, OutputCPURegister(instruction), source);
2454 codegen_->MaybeRecordImplicitNullCheck(instruction);
2455 }
Roland Levillain44015862016-01-22 11:47:17 +00002456
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002457 if (type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002458 static_assert(
2459 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2460 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2461 Location obj_loc = locations->InAt(0);
2462 if (index.IsConstant()) {
2463 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2464 } else {
2465 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2466 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002467 }
Roland Levillain4d027112015-07-01 15:41:14 +01002468 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002469}
2470
Alexandre Rames5319def2014-10-23 10:03:10 +01002471void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002472 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002473 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002474 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002475}
2476
2477void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002478 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002479 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002480 {
2481 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2482 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2483 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2484 codegen_->MaybeRecordImplicitNullCheck(instruction);
2485 }
jessicahandojo05765752016-09-09 19:01:32 -07002486 // Mask out compression flag from String's array length.
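// The count field holds (length << 1) | compression_flag, so the logical shift right by one
// below recovers the character count for both compressed and uncompressed strings.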
2487 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002488 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002489 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002490}
2491
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002492void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002493 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002494
2495 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002496 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002497 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002498 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002499 LocationSummary::kCallOnSlowPath :
2500 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002501 locations->SetInAt(0, Location::RequiresRegister());
2502 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002503 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2504 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002505 } else if (DataType::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002506 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002507 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002508 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002509 }
2510}
2511
2512void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002513 DataType::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002514 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002515 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002516 bool needs_write_barrier =
2517 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002518
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002519 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002520 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002521 CPURegister source = value;
2522 Location index = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002523 size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002524 MemOperand destination = HeapOperand(array);
2525 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002526
2527 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002528 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002529 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002530 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002531 destination = HeapOperand(array, offset);
2532 } else {
2533 UseScratchRegisterScope temps(masm);
2534 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002535 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002536 // We do not need to compute the intermediate address from the array: the
2537 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002538 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002539 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002540 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002541 DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
2542 }
2543 temp = array;
2544 } else {
2545 __ Add(temp, array, offset);
2546 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002547 destination = HeapOperand(temp,
2548 XRegisterFrom(index),
2549 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002550 DataType::SizeShift(value_type));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002551 }
Artem Serov914d7a82017-02-07 14:33:49 +00002552 {
2553 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2554 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2555 codegen_->Store(value_type, value, destination);
2556 codegen_->MaybeRecordImplicitNullCheck(instruction);
2557 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002558 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002559 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002560 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002561 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002562 {
2563 // We use a block to end the scratch scope before the write barrier, thus
2564 // freeing the temporary registers so they can be used in `MarkGCCard`.
2565 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002566 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002567 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002568 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002569 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002570 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002571 destination = HeapOperand(temp,
2572 XRegisterFrom(index),
2573 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002574 DataType::SizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002575 }
2576
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002577 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2578 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2579 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2580
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002581 if (may_need_runtime_call_for_type_check) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01002582 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002583 codegen_->AddSlowPath(slow_path);
2584 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002585 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002586 __ Cbnz(Register(value), &non_zero);
2587 if (!index.IsConstant()) {
2588 __ Add(temp, array, offset);
2589 }
Artem Serov914d7a82017-02-07 14:33:49 +00002590 {
2591 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
2592 // emitted.
2593 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2594 __ Str(wzr, destination);
2595 codegen_->MaybeRecordImplicitNullCheck(instruction);
2596 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002597 __ B(&done);
2598 __ Bind(&non_zero);
2599 }
2600
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002601 // Note that when Baker read barriers are enabled, the type
2602 // checks are performed without read barriers. This is fine,
2603 // even in the case where a class object is in the from-space
2604 // after the flip, as a comparison involving such a type would
2605 // not produce a false positive; it may of course produce a
2606 // false negative, in which case we would take the ArraySet
2607 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01002608
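// Fast path: compare value->klass_ against array->klass_->component_type_. For arrays
// statically typed as Object[], a mismatch is still tolerated when the component type's
// super class is null (the component type is then Object); otherwise the slow path is taken.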
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002609 Register temp2 = temps.AcquireSameSizeAs(array);
2610 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00002611 {
2612 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2613 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2614 __ Ldr(temp, HeapOperand(array, class_offset));
2615 codegen_->MaybeRecordImplicitNullCheck(instruction);
2616 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002617 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01002618
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002619 // /* HeapReference<Class> */ temp = temp->component_type_
2620 __ Ldr(temp, HeapOperand(temp, component_offset));
2621 // /* HeapReference<Class> */ temp2 = value->klass_
2622 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2623 // If heap poisoning is enabled, no need to unpoison `temp`
2624 // nor `temp2`, as we are comparing two poisoned references.
2625 __ Cmp(temp, temp2);
2626 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01002627
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002628 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2629 vixl::aarch64::Label do_put;
2630 __ B(eq, &do_put);
2631 // If heap poisoning is enabled, the `temp` reference has
2632 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002633 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2634
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002635 // /* HeapReference<Class> */ temp = temp->super_class_
2636 __ Ldr(temp, HeapOperand(temp, super_offset));
2637 // If heap poisoning is enabled, no need to unpoison
2638 // `temp`, as we are comparing against null below.
2639 __ Cbnz(temp, slow_path->GetEntryLabel());
2640 __ Bind(&do_put);
2641 } else {
2642 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002643 }
2644 }
2645
2646 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002647 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002648 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002649 __ Mov(temp2, value.W());
2650 GetAssembler()->PoisonHeapReference(temp2);
2651 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002652 }
2653
2654 if (!index.IsConstant()) {
2655 __ Add(temp, array, offset);
Vladimir Markod1ef8732017-04-18 13:55:13 +01002656 } else {
2657 // We no longer need the `temp` here so release it as the store below may
2658 // need a scratch register (if the constant index makes the offset too large)
2659 // and the poisoned `source` could be using the other scratch register.
2660 temps.Release(temp);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002661 }
Artem Serov914d7a82017-02-07 14:33:49 +00002662 {
2663 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2664 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2665 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002666
Artem Serov914d7a82017-02-07 14:33:49 +00002667 if (!may_need_runtime_call_for_type_check) {
2668 codegen_->MaybeRecordImplicitNullCheck(instruction);
2669 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002670 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002671 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002672
2673 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2674
2675 if (done.IsLinked()) {
2676 __ Bind(&done);
2677 }
2678
2679 if (slow_path != nullptr) {
2680 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002681 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002682 }
2683}
2684
Alexandre Rames67555f72014-11-18 10:55:16 +00002685void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002686 RegisterSet caller_saves = RegisterSet::Empty();
2687 InvokeRuntimeCallingConvention calling_convention;
2688 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
2689 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
2690 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00002691 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002692 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002693}
2694
2695void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002696 BoundsCheckSlowPathARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01002697 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002698 codegen_->AddSlowPath(slow_path);
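// A single unsigned comparison covers both cases: a negative index wraps to a large
// unsigned value, so the `hs` (unsigned >=) branch goes to the slow path for index < 0 as
// well as for index >= length.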
Alexandre Rames67555f72014-11-18 10:55:16 +00002699 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2700 __ B(slow_path->GetEntryLabel(), hs);
2701}
2702
Alexandre Rames67555f72014-11-18 10:55:16 +00002703void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2704 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002705 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexandre Rames67555f72014-11-18 10:55:16 +00002706 locations->SetInAt(0, Location::RequiresRegister());
2707 if (check->HasUses()) {
2708 locations->SetOut(Location::SameAsFirstInput());
2709 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01002710 // Rely on the type initialization to save everything we need.
2711 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Alexandre Rames67555f72014-11-18 10:55:16 +00002712}
2713
2714void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2715 // We assume the class is not null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01002716 SlowPathCodeARM64* slow_path =
2717 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(check->GetLoadClass(), check);
Alexandre Rames67555f72014-11-18 10:55:16 +00002718 codegen_->AddSlowPath(slow_path);
2719 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2720}
2721
Roland Levillain1a653882016-03-18 18:05:57 +00002722static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2723 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2724 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2725}
2726
2727void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2728 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2729 Location rhs_loc = instruction->GetLocations()->InAt(1);
2730 if (rhs_loc.IsConstant()) {
2731 // 0.0 is the only immediate that can be encoded directly in
2732 // an FCMP instruction.
2733 //
2734 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2735 // specify that in a floating-point comparison, positive zero
2736 // and negative zero are considered equal, so we can use the
2737 // literal 0.0 for both cases here.
2738 //
2739 // Note however that some methods (Float.equals, Float.compare,
2740 // Float.compareTo, Double.equals, Double.compare,
2741 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2742 // StrictMath.min) consider 0.0 to be (strictly) greater than
2743 // -0.0. So if we ever translate calls to these methods into a
2744 // HCompare instruction, we must handle the -0.0 case with
2745 // care here.
2746 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2747 __ Fcmp(lhs_reg, 0.0);
2748 } else {
2749 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2750 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002751}
2752
Serban Constantinescu02164b32014-11-13 14:05:07 +00002753void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002754 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002755 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002756 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002757 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002758 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002759 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002760 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002761 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002762 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002763 case DataType::Type::kInt32:
2764 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002765 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002766 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002767 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2768 break;
2769 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002770 case DataType::Type::kFloat32:
2771 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002772 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002773 locations->SetInAt(1,
2774 IsFloatingPointZeroConstant(compare->InputAt(1))
2775 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2776 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002777 locations->SetOut(Location::RequiresRegister());
2778 break;
2779 }
2780 default:
2781 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2782 }
2783}
2784
2785void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002786 DataType::Type in_type = compare->InputAt(0)->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002787
2788 // 0 if: left == right
2789 // 1 if: left > right
2790 // -1 if: left < right
2791 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002792 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002793 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002794 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002795 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002796 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002797 case DataType::Type::kInt32:
2798 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002799 Register result = OutputRegister(compare);
2800 Register left = InputRegisterAt(compare, 0);
2801 Operand right = InputOperandAt(compare, 1);
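// Materialize the result without branches. E.g. left < right: NE sets the result to 1 and
// LT then negates it to -1; left == right leaves 0; left > right keeps +1.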
Serban Constantinescu02164b32014-11-13 14:05:07 +00002802 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002803 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2804 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002805 break;
2806 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002807 case DataType::Type::kFloat32:
2808 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002809 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002810 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002811 __ Cset(result, ne);
2812 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002813 break;
2814 }
2815 default:
2816 LOG(FATAL) << "Unimplemented compare type " << in_type;
2817 }
2818}
2819
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002820void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002821 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002822
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002823 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain7f63c522015-07-13 15:54:55 +00002824 locations->SetInAt(0, Location::RequiresFpuRegister());
2825 locations->SetInAt(1,
2826 IsFloatingPointZeroConstant(instruction->InputAt(1))
2827 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2828 : Location::RequiresFpuRegister());
2829 } else {
2830 // Integer cases.
2831 locations->SetInAt(0, Location::RequiresRegister());
2832 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2833 }
2834
David Brazdilb3e773e2016-01-26 11:28:37 +00002835 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002836 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002837 }
2838}
2839
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002840void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002841 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002842 return;
2843 }
2844
2845 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002846 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002847 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002848
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002849 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002850 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002851 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002852 } else {
2853 // Integer cases.
2854 Register lhs = InputRegisterAt(instruction, 0);
2855 Operand rhs = InputOperandAt(instruction, 1);
2856 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002857 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002858 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002859}
2860
2861#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2862 M(Equal) \
2863 M(NotEqual) \
2864 M(LessThan) \
2865 M(LessThanOrEqual) \
2866 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002867 M(GreaterThanOrEqual) \
2868 M(Below) \
2869 M(BelowOrEqual) \
2870 M(Above) \
2871 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002872#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002873void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2874void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002875FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002876#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002877#undef FOR_EACH_CONDITION_INSTRUCTION
2878
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002879void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002880 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002881 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002882 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
2883
2884 Register out = OutputRegister(instruction);
2885 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01002886
2887 if (abs_imm == 2) {
2888 int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
2889 __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
2890 } else {
2891 UseScratchRegisterScope temps(GetVIXLAssembler());
2892 Register temp = temps.AcquireSameSizeAs(out);
2893 __ Add(temp, dividend, abs_imm - 1);
2894 __ Cmp(dividend, 0);
2895 __ Csel(out, temp, dividend, lt);
2896 }
2897
Zheng Xuc6667102015-05-15 16:08:45 +08002898 int ctz_imm = CTZ(abs_imm);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002899 if (imm > 0) {
2900 __ Asr(out, out, ctz_imm);
Zheng Xuc6667102015-05-15 16:08:45 +08002901 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002902 __ Neg(out, Operand(out, ASR, ctz_imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002903 }
2904}
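
// A minimal host-side sketch (illustrative only; not part of the generator) of the
// round-toward-zero division by a power of two emitted above: negative dividends are
// biased by abs_imm - 1 before the arithmetic shift (for abs_imm == 2 the bias is just
// the sign bit), and the quotient is negated when the divisor is negative.
static int64_t SketchDivByPowerOfTwo(int64_t dividend, int64_t imm) {
  uint64_t magnitude = static_cast<uint64_t>(imm);
  uint64_t abs_imm = (imm < 0) ? (0u - magnitude) : magnitude;
  int ctz_imm = 0;
  while (((abs_imm >> ctz_imm) & 1u) == 0u) {
    ++ctz_imm;  // CTZ(abs_imm) in the code above.
  }
  int64_t biased =
      (dividend < 0) ? dividend + static_cast<int64_t>(abs_imm) - 1 : dividend;  // Csel
  int64_t quotient = biased >> ctz_imm;                                          // Asr
  return (imm > 0) ? quotient : -quotient;                                       // Neg
}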
2905
2906void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2907 DCHECK(instruction->IsDiv() || instruction->IsRem());
2908
2909 LocationSummary* locations = instruction->GetLocations();
2910 Location second = locations->InAt(1);
2911 DCHECK(second.IsConstant());
2912
2913 Register out = OutputRegister(instruction);
2914 Register dividend = InputRegisterAt(instruction, 0);
2915 int64_t imm = Int64FromConstant(second.GetConstant());
2916
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002917 DataType::Type type = instruction->GetResultType();
2918 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Zheng Xuc6667102015-05-15 16:08:45 +08002919
2920 int64_t magic;
2921 int shift;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002922 CalculateMagicAndShiftForDivRem(
2923 imm, type == DataType::Type::kInt64 /* is_long */, &magic, &shift);
Zheng Xuc6667102015-05-15 16:08:45 +08002924
2925 UseScratchRegisterScope temps(GetVIXLAssembler());
2926 Register temp = temps.AcquireSameSizeAs(out);
2927
2928 // temp = get_high(dividend * magic)
2929 __ Mov(temp, magic);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002930 if (type == DataType::Type::kInt64) {
Zheng Xuc6667102015-05-15 16:08:45 +08002931 __ Smulh(temp, dividend, temp);
2932 } else {
2933 __ Smull(temp.X(), dividend, temp);
2934 __ Lsr(temp.X(), temp.X(), 32);
2935 }
2936
2937 if (imm > 0 && magic < 0) {
2938 __ Add(temp, temp, dividend);
2939 } else if (imm < 0 && magic > 0) {
2940 __ Sub(temp, temp, dividend);
2941 }
2942
2943 if (shift != 0) {
2944 __ Asr(temp, temp, shift);
2945 }
2946
2947 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002948 __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08002949 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002950 __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08002951 // TODO: Strength reduction for msub.
2952 Register temp_imm = temps.AcquireSameSizeAs(out);
2953 __ Mov(temp_imm, imm);
2954 __ Msub(out, temp, temp_imm, dividend);
2955 }
2956}
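
// A minimal host-side sketch (illustrative only; not part of the generator) of the
// magic-number sequence above, instantiated for int32 division by 7. The constants
// (magic 0x92492493, shift 2) are the classic Hacker's Delight values that
// CalculateMagicAndShiftForDivRem computes; since that magic constant is negative and
// the divisor is positive, the dividend is added back, matching the
// `imm > 0 && magic < 0` adjustment above.
static int32_t SketchDivideBySeven(int32_t dividend) {
  const int32_t magic = static_cast<int32_t>(0x92492493);
  const int shift = 2;
  int64_t product = static_cast<int64_t>(dividend) * magic;  // Smull
  int32_t temp = static_cast<int32_t>(product >> 32);        // get_high(dividend * magic)
  temp += dividend;                                           // imm > 0 && magic < 0
  temp >>= shift;                                             // Asr
  return temp - (temp >> 31);                                 // add 1 if temp is negative
}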
2957
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002958void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002959 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Zheng Xuc6667102015-05-15 16:08:45 +08002960
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002961 if (imm == 0) {
2962    // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2963 return;
2964 }
Zheng Xuc6667102015-05-15 16:08:45 +08002965
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002966 if (IsPowerOfTwo(AbsOrMin(imm))) {
2967 GenerateIntDivForPower2Denom(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08002968 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002969 // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
2970 DCHECK(imm < -2 || imm > 2) << imm;
2971 GenerateDivRemWithAnyConstant(instruction);
2972 }
2973}
2974
2975void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv *instruction) {
2976 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
2977 << instruction->GetResultType();
2978
2979 if (instruction->GetLocations()->InAt(1).IsConstant()) {
2980 GenerateIntDivForConstDenom(instruction);
2981 } else {
2982 Register out = OutputRegister(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08002983 Register dividend = InputRegisterAt(instruction, 0);
2984 Register divisor = InputRegisterAt(instruction, 1);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01002985 __ Sdiv(out, dividend, divisor);
Zheng Xuc6667102015-05-15 16:08:45 +08002986 }
2987}
2988
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002989void LocationsBuilderARM64::VisitDiv(HDiv* div) {
2990 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002991 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002992 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002993 case DataType::Type::kInt32:
2994 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002995 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08002996 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002997 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2998 break;
2999
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003000 case DataType::Type::kFloat32:
3001 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003002 locations->SetInAt(0, Location::RequiresFpuRegister());
3003 locations->SetInAt(1, Location::RequiresFpuRegister());
3004 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3005 break;
3006
3007 default:
3008 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3009 }
3010}
3011
3012void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003013 DataType::Type type = div->GetResultType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003014 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003015 case DataType::Type::kInt32:
3016 case DataType::Type::kInt64:
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003017 GenerateIntDiv(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003018 break;
3019
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003020 case DataType::Type::kFloat32:
3021 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003022 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3023 break;
3024
3025 default:
3026 LOG(FATAL) << "Unexpected div type " << type;
3027 }
3028}
3029
Alexandre Rames67555f72014-11-18 10:55:16 +00003030void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003031 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003032 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003033}
3034
3035void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3036 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003037 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003038 codegen_->AddSlowPath(slow_path);
3039 Location value = instruction->GetLocations()->InAt(0);
3040
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003041 DataType::Type type = instruction->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +00003042
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003043 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003044 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003045 return;
3046 }
3047
Alexandre Rames67555f72014-11-18 10:55:16 +00003048 if (value.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003049 int64_t divisor = Int64FromLocation(value);
Alexandre Rames67555f72014-11-18 10:55:16 +00003050 if (divisor == 0) {
3051 __ B(slow_path->GetEntryLabel());
3052 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003053      // A division by a non-zero constant is valid. We don't need to perform
3054 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003055 }
3056 } else {
3057 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3058 }
3059}
3060
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003061void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3062 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003063 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003064 locations->SetOut(Location::ConstantLocation(constant));
3065}
3066
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003067void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3068 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003069 // Will be generated at use site.
3070}
3071
Alexandre Rames5319def2014-10-23 10:03:10 +01003072void LocationsBuilderARM64::VisitExit(HExit* exit) {
3073 exit->SetLocations(nullptr);
3074}
3075
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003076void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003077}
3078
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003079void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3080 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003081 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003082 locations->SetOut(Location::ConstantLocation(constant));
3083}
3084
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003085void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003086 // Will be generated at use site.
3087}
3088
David Brazdilfc6a86a2015-06-26 10:33:45 +00003089void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003090 if (successor->IsExitBlock()) {
3091 DCHECK(got->GetPrevious()->AlwaysThrows());
3092 return; // no code needed
3093 }
3094
Serban Constantinescu02164b32014-11-13 14:05:07 +00003095 HBasicBlock* block = got->GetBlock();
3096 HInstruction* previous = got->GetPrevious();
3097 HLoopInformation* info = block->GetLoopInformation();
3098
David Brazdil46e2a392015-03-16 17:31:52 +00003099 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00003100 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
3101 UseScratchRegisterScope temps(GetVIXLAssembler());
3102 Register temp1 = temps.AcquireX();
3103 Register temp2 = temps.AcquireX();
3104 __ Ldr(temp1, MemOperand(sp, 0));
3105 __ Ldrh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3106 __ Add(temp2, temp2, 1);
3107 __ Strh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3108 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003109 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3110 return;
3111 }
3112 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3113 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01003114 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003115 }
3116 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003117 __ B(codegen_->GetLabelOf(successor));
3118 }
3119}
3120
David Brazdilfc6a86a2015-06-26 10:33:45 +00003121void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3122 got->SetLocations(nullptr);
3123}
3124
3125void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3126 HandleGoto(got, got->GetSuccessor());
3127}
3128
3129void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3130 try_boundary->SetLocations(nullptr);
3131}
3132
3133void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3134 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3135 if (!successor->IsExitBlock()) {
3136 HandleGoto(try_boundary, successor);
3137 }
3138}
3139
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003140void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003141 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003142 vixl::aarch64::Label* true_target,
3143 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003144 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003145
David Brazdil0debae72015-11-12 18:37:00 +00003146 if (true_target == nullptr && false_target == nullptr) {
3147 // Nothing to do. The code always falls through.
3148 return;
3149 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003150 // Constant condition, statically compared against "true" (integer value 1).
3151 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003152 if (true_target != nullptr) {
3153 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003154 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003155 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003156 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003157 if (false_target != nullptr) {
3158 __ B(false_target);
3159 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003160 }
David Brazdil0debae72015-11-12 18:37:00 +00003161 return;
3162 }
3163
3164 // The following code generates these patterns:
3165 // (1) true_target == nullptr && false_target != nullptr
3166 // - opposite condition true => branch to false_target
3167 // (2) true_target != nullptr && false_target == nullptr
3168 // - condition true => branch to true_target
3169 // (3) true_target != nullptr && false_target != nullptr
3170 // - condition true => branch to true_target
3171 // - branch to false_target
3172 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003173 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003174 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003175 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003176 if (true_target == nullptr) {
3177 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3178 } else {
3179 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3180 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003181 } else {
3182 // The condition instruction has not been materialized, use its inputs as
3183 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003184 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003185
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003186 DataType::Type type = condition->InputAt(0)->GetType();
3187 if (DataType::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003188 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003189 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003190 IfCondition opposite_condition = condition->GetOppositeCondition();
3191 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003192 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003193 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003194 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003195 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003196 // Integer cases.
3197 Register lhs = InputRegisterAt(condition, 0);
3198 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003199
3200 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003201 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003202 if (true_target == nullptr) {
3203 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3204 non_fallthrough_target = false_target;
3205 } else {
3206 arm64_cond = ARM64Condition(condition->GetCondition());
3207 non_fallthrough_target = true_target;
3208 }
3209
Aart Bik086d27e2016-01-20 17:02:00 -08003210 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003211 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003212 switch (arm64_cond) {
3213 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003214 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003215 break;
3216 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003217 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003218 break;
3219 case lt:
3220 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003221 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003222 break;
3223 case ge:
3224 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003225 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003226 break;
3227 default:
3228 // Without the `static_cast` the compiler throws an error for
3229 // `-Werror=sign-promo`.
3230 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3231 }
3232 } else {
3233 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003234 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003235 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003236 }
3237 }
David Brazdil0debae72015-11-12 18:37:00 +00003238
3239 // If neither branch falls through (case 3), the conditional branch to `true_target`
3240 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3241 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003242 __ B(false_target);
3243 }
3244}
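
// A minimal host-side sketch (illustrative only; not part of the generator) of the
// sign-bit shortcut used above when branching on `lt` or `ge` against zero: testing
// the most significant bit with Tbnz/Tbz is equivalent to the signed comparison, so
// no Cmp is needed.
static bool SketchIsNegativeViaSignBit(int32_t lhs) {
  // __ Tbnz(lhs, kWRegSize - 1, target) branches exactly when this returns true.
  return ((static_cast<uint32_t>(lhs) >> 31) & 1u) != 0u;
}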
3245
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003246void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003247 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003248 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003249 locations->SetInAt(0, Location::RequiresRegister());
3250 }
3251}
3252
3253void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003254 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3255 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003256 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3257 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3258 true_target = nullptr;
3259 }
3260 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3261 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3262 false_target = nullptr;
3263 }
David Brazdil0debae72015-11-12 18:37:00 +00003264 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003265}
3266
3267void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003268 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003269 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01003270 InvokeRuntimeCallingConvention calling_convention;
3271 RegisterSet caller_saves = RegisterSet::Empty();
3272 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3273 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00003274 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003275 locations->SetInAt(0, Location::RequiresRegister());
3276 }
3277}
3278
3279void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003280 SlowPathCodeARM64* slow_path =
3281 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003282 GenerateTestAndBranch(deoptimize,
3283 /* condition_input_index */ 0,
3284 slow_path->GetEntryLabel(),
3285 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003286}
3287
Mingyao Yang063fc772016-08-02 11:02:54 -07003288void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003289 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07003290 LocationSummary(flag, LocationSummary::kNoCall);
3291 locations->SetOut(Location::RequiresRegister());
3292}
3293
3294void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3295 __ Ldr(OutputRegister(flag),
3296 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3297}
3298
David Brazdilc0b601b2016-02-08 14:20:45 +00003299static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3300 return condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003301 DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
David Brazdilc0b601b2016-02-08 14:20:45 +00003302}
3303
Alexandre Rames880f1192016-06-13 16:04:50 +01003304static inline Condition GetConditionForSelect(HCondition* condition) {
3305 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003306 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3307 : ARM64Condition(cond);
3308}
3309
David Brazdil74eb1b22015-12-14 11:44:01 +00003310void LocationsBuilderARM64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003311 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003312 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003313 locations->SetInAt(0, Location::RequiresFpuRegister());
3314 locations->SetInAt(1, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003315 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames880f1192016-06-13 16:04:50 +01003316 } else {
3317 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3318 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3319 bool is_true_value_constant = cst_true_value != nullptr;
3320 bool is_false_value_constant = cst_false_value != nullptr;
3321 // Ask VIXL whether we should synthesize constants in registers.
3322 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3323 Operand true_op = is_true_value_constant ?
3324 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3325 Operand false_op = is_false_value_constant ?
3326 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3327 bool true_value_in_register = false;
3328 bool false_value_in_register = false;
3329 MacroAssembler::GetCselSynthesisInformation(
3330 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3331 true_value_in_register |= !is_true_value_constant;
3332 false_value_in_register |= !is_false_value_constant;
3333
3334 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3335 : Location::ConstantLocation(cst_true_value));
3336 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3337 : Location::ConstantLocation(cst_false_value));
Donghui Bai426b49c2016-11-08 14:55:38 +08003338 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003339 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003340
David Brazdil74eb1b22015-12-14 11:44:01 +00003341 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3342 locations->SetInAt(2, Location::RequiresRegister());
3343 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003344}
3345
3346void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003347 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003348 Condition csel_cond;
3349
3350 if (IsBooleanValueOrMaterializedCondition(cond)) {
3351 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003352 // Use the condition flags set by the previous instruction.
3353 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003354 } else {
3355 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003356 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003357 }
3358 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003359 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003360 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003361 } else {
3362 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003363 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003364 }
3365
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003366 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003367 __ Fcsel(OutputFPRegister(select),
3368 InputFPRegisterAt(select, 1),
3369 InputFPRegisterAt(select, 0),
3370 csel_cond);
3371 } else {
3372 __ Csel(OutputRegister(select),
3373 InputOperandAt(select, 1),
3374 InputOperandAt(select, 0),
3375 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003376 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003377}
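
// A minimal host-side sketch (illustrative only; not part of the generator) of the
// branchless select emitted above for HSelect: Csel/Fcsel pick one of the two already
// computed inputs based on the condition flags, so no branch is generated.
static int64_t SketchSelect(bool condition_holds, int64_t true_value, int64_t false_value) {
  return condition_holds ? true_value : false_value;  // __ Csel(out, true, false, csel_cond)
}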
3378
David Srbecky0cf44932015-12-09 14:09:59 +00003379void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003380 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00003381}
3382
David Srbeckyd28f4a02016-03-14 17:14:24 +00003383void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3384 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003385}
3386
3387void CodeGeneratorARM64::GenerateNop() {
3388 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003389}
3390
Alexandre Rames5319def2014-10-23 10:03:10 +01003391void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00003392 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003393}
3394
3395void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003396 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003397}
3398
3399void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003400 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003401}
3402
3403void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003404 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003405}
3406
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003407// A temp is used for the read barrier.
3408static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3409 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003410 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003411 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3412 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3413 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3414 return 1;
3415 }
3416 return 0;
3417}
3418
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003419// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003420// interface pointer, one for loading the current interface.
3421// The other checks have one temp for loading the object's class.
3422static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3423 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3424 return 3;
3425 }
3426 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003427}
3428
Alexandre Rames67555f72014-11-18 10:55:16 +00003429void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003430 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003431 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003432 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003433 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003434 case TypeCheckKind::kExactCheck:
3435 case TypeCheckKind::kAbstractClassCheck:
3436 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00003437 case TypeCheckKind::kArrayObjectCheck: {
3438 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
3439 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
3440 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003441 break;
Vladimir Marko87584542017-12-12 17:47:52 +00003442 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003443 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003444 case TypeCheckKind::kUnresolvedCheck:
3445 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003446 call_kind = LocationSummary::kCallOnSlowPath;
3447 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00003448 case TypeCheckKind::kBitstringCheck:
3449 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003450 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003451
Vladimir Markoca6fff82017-10-03 14:49:14 +01003452 LocationSummary* locations =
3453 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003454 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003455 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003456 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003457 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003458 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3459 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3460 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3461 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3462 } else {
3463 locations->SetInAt(1, Location::RequiresRegister());
3464 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003465 // The "out" register is used as a temporary, so it overlaps with the inputs.
3466 // Note that TypeCheckSlowPathARM64 uses this register too.
3467 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003468 // Add temps if necessary for read barriers.
3469 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003470}
3471
3472void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003473 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003474 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003475 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003476 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00003477 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
3478 ? Register()
3479 : InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003480 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003481 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003482 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3483 DCHECK_LE(num_temps, 1u);
3484 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003485 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3486 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3487 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3488 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003489
Scott Wakeling97c72b72016-06-24 16:19:36 +01003490 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003491 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003492
3493 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003494 // Avoid null check if we know `obj` is not null.
3495 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003496 __ Cbz(obj, &zero);
3497 }
3498
Roland Levillain44015862016-01-22 11:47:17 +00003499 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003500 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003501 ReadBarrierOption read_barrier_option =
3502 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003503 // /* HeapReference<Class> */ out = obj->klass_
3504 GenerateReferenceLoadTwoRegisters(instruction,
3505 out_loc,
3506 obj_loc,
3507 class_offset,
3508 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003509 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003510 __ Cmp(out, cls);
3511 __ Cset(out, eq);
3512 if (zero.IsLinked()) {
3513 __ B(&done);
3514 }
3515 break;
3516 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003517
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003518 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003519 ReadBarrierOption read_barrier_option =
3520 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003521 // /* HeapReference<Class> */ out = obj->klass_
3522 GenerateReferenceLoadTwoRegisters(instruction,
3523 out_loc,
3524 obj_loc,
3525 class_offset,
3526 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003527 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003528 // If the class is abstract, we eagerly fetch the super class of the
3529 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003530 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003531 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003532 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003533 GenerateReferenceLoadOneRegister(instruction,
3534 out_loc,
3535 super_offset,
3536 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003537 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003538 // If `out` is null, we use it for the result, and jump to `done`.
3539 __ Cbz(out, &done);
3540 __ Cmp(out, cls);
3541 __ B(ne, &loop);
3542 __ Mov(out, 1);
3543 if (zero.IsLinked()) {
3544 __ B(&done);
3545 }
3546 break;
3547 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003548
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003549 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003550 ReadBarrierOption read_barrier_option =
3551 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003552 // /* HeapReference<Class> */ out = obj->klass_
3553 GenerateReferenceLoadTwoRegisters(instruction,
3554 out_loc,
3555 obj_loc,
3556 class_offset,
3557 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003558 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003559 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003560 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003561 __ Bind(&loop);
3562 __ Cmp(out, cls);
3563 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003564 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003565 GenerateReferenceLoadOneRegister(instruction,
3566 out_loc,
3567 super_offset,
3568 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003569 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003570 __ Cbnz(out, &loop);
3571 // If `out` is null, we use it for the result, and jump to `done`.
3572 __ B(&done);
3573 __ Bind(&success);
3574 __ Mov(out, 1);
3575 if (zero.IsLinked()) {
3576 __ B(&done);
3577 }
3578 break;
3579 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003580
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003581 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003582 ReadBarrierOption read_barrier_option =
3583 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003584 // /* HeapReference<Class> */ out = obj->klass_
3585 GenerateReferenceLoadTwoRegisters(instruction,
3586 out_loc,
3587 obj_loc,
3588 class_offset,
3589 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003590 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003591 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003592 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003593 __ Cmp(out, cls);
3594 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003595 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003596 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003597 GenerateReferenceLoadOneRegister(instruction,
3598 out_loc,
3599 component_offset,
3600 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003601 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003602 // If `out` is null, we use it for the result, and jump to `done`.
3603 __ Cbz(out, &done);
3604 __ Ldrh(out, HeapOperand(out, primitive_offset));
3605 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3606 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003607 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003608 __ Mov(out, 1);
3609 __ B(&done);
3610 break;
3611 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003612
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003613 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003614 // No read barrier since the slow path will retry upon failure.
3615 // /* HeapReference<Class> */ out = obj->klass_
3616 GenerateReferenceLoadTwoRegisters(instruction,
3617 out_loc,
3618 obj_loc,
3619 class_offset,
3620 maybe_temp_loc,
3621 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003622 __ Cmp(out, cls);
3623 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003624 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
3625 instruction, /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003626 codegen_->AddSlowPath(slow_path);
3627 __ B(ne, slow_path->GetEntryLabel());
3628 __ Mov(out, 1);
3629 if (zero.IsLinked()) {
3630 __ B(&done);
3631 }
3632 break;
3633 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003634
Calin Juravle98893e12015-10-02 21:05:03 +01003635 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003636 case TypeCheckKind::kInterfaceCheck: {
3637 // Note that we indeed only call on slow path, but we always go
3638 // into the slow path for the unresolved and interface check
3639 // cases.
3640 //
3641 // We cannot directly call the InstanceofNonTrivial runtime
3642 // entry point without resorting to a type checking slow path
3643 // here (i.e. by calling InvokeRuntime directly), as it would
3644      // require assigning fixed registers for the inputs of this
3645 // HInstanceOf instruction (following the runtime calling
3646 // convention), which might be cluttered by the potential first
3647 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003648 //
3649 // TODO: Introduce a new runtime entry point taking the object
3650 // to test (instead of its class) as argument, and let it deal
3651 // with the read barrier issues. This will let us refactor this
3652 // case of the `switch` code as it was previously (with a direct
3653 // call to the runtime not using a type checking slow path).
3654 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003655 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003656 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
3657 instruction, /* is_fatal */ false);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003658 codegen_->AddSlowPath(slow_path);
3659 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003660 if (zero.IsLinked()) {
3661 __ B(&done);
3662 }
3663 break;
3664 }
Vladimir Marko175e7862018-03-27 09:03:13 +00003665
3666 case TypeCheckKind::kBitstringCheck: {
3667 // /* HeapReference<Class> */ temp = obj->klass_
3668 GenerateReferenceLoadTwoRegisters(instruction,
3669 out_loc,
3670 obj_loc,
3671 class_offset,
3672 maybe_temp_loc,
3673 kWithoutReadBarrier);
3674
3675 GenerateBitstringTypeCheckCompare(instruction, out);
3676 __ Cset(out, eq);
3677 if (zero.IsLinked()) {
3678 __ B(&done);
3679 }
3680 break;
3681 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003682 }
3683
3684 if (zero.IsLinked()) {
3685 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003686 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003687 }
3688
3689 if (done.IsLinked()) {
3690 __ Bind(&done);
3691 }
3692
3693 if (slow_path != nullptr) {
3694 __ Bind(slow_path->GetExitLabel());
3695 }
3696}
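
// A minimal sketch (illustrative only; SketchClass is a simplified stand-in for the
// runtime's mirror::Class) of the loop generated above for
// TypeCheckKind::kClassHierarchyCheck: walk the super class chain of the object's
// class until the target class is found or the chain ends.
struct SketchClass {
  const SketchClass* super_class;  // /* HeapReference<Class> */ out = out->super_class_
};
static bool SketchClassHierarchyInstanceOf(const SketchClass* klass, const SketchClass* cls) {
  for (const SketchClass* k = klass; k != nullptr; k = k->super_class) {
    if (k == cls) {
      return true;   // __ B(eq, &success); ... __ Mov(out, 1);
    }
  }
  return false;      // `out` is null: use it as the (false) result.
}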
3697
3698void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003699 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00003700 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01003701 LocationSummary* locations =
3702 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003703 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003704 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3705 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3706 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3707 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3708 } else {
3709 locations->SetInAt(1, Location::RequiresRegister());
3710 }
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003711 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
3712 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003713}
3714
3715void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003716 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003717 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003718 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003719 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00003720 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
3721 ? Register()
3722 : InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003723 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3724 DCHECK_GE(num_temps, 1u);
3725 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003726 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003727 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
3728 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003729 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003730 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3731 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3732 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3733 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3734 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3735 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3736 const uint32_t object_array_data_offset =
3737 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003738
Vladimir Marko87584542017-12-12 17:47:52 +00003739 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003740 SlowPathCodeARM64* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003741 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
3742 instruction, is_type_check_slow_path_fatal);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003743 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003744
Scott Wakeling97c72b72016-06-24 16:19:36 +01003745 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003746 // Avoid null check if we know obj is not null.
3747 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003748 __ Cbz(obj, &done);
3749 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003750
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003751 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003752 case TypeCheckKind::kExactCheck:
3753 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003754 // /* HeapReference<Class> */ temp = obj->klass_
3755 GenerateReferenceLoadTwoRegisters(instruction,
3756 temp_loc,
3757 obj_loc,
3758 class_offset,
3759 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003760 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003761
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003762 __ Cmp(temp, cls);
3763 // Jump to slow path for throwing the exception or doing a
3764 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003765 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003766 break;
3767 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003768
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003769 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003770 // /* HeapReference<Class> */ temp = obj->klass_
3771 GenerateReferenceLoadTwoRegisters(instruction,
3772 temp_loc,
3773 obj_loc,
3774 class_offset,
3775 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003776 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003777
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003778 // If the class is abstract, we eagerly fetch the super class of the
3779 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003780 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003781 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003782 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003783 GenerateReferenceLoadOneRegister(instruction,
3784 temp_loc,
3785 super_offset,
3786 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003787 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003788
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003789 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3790 // exception.
3791 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3792 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003793 __ Cmp(temp, cls);
3794 __ B(ne, &loop);
3795 break;
3796 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003797
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003798 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003799 // /* HeapReference<Class> */ temp = obj->klass_
3800 GenerateReferenceLoadTwoRegisters(instruction,
3801 temp_loc,
3802 obj_loc,
3803 class_offset,
3804 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003805 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003806
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003807 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003808 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003809 __ Bind(&loop);
3810 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003811 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003812
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003813 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003814 GenerateReferenceLoadOneRegister(instruction,
3815 temp_loc,
3816 super_offset,
3817 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003818 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003819
3820 // If the class reference currently in `temp` is not null, jump
3821 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003822 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003823 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003824 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003825 break;
3826 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003827
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003828 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003829 // /* HeapReference<Class> */ temp = obj->klass_
3830 GenerateReferenceLoadTwoRegisters(instruction,
3831 temp_loc,
3832 obj_loc,
3833 class_offset,
3834 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003835 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003836
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003837 // Do an exact check.
3838 __ Cmp(temp, cls);
3839 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003840
3841 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003842 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003843 GenerateReferenceLoadOneRegister(instruction,
3844 temp_loc,
3845 component_offset,
3846 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003847 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003848
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003849 // If the component type is null, jump to the slow path to throw the exception.
3850 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3851 // Otherwise, the object is indeed an array. Further check that this component type is not a
3852 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003853 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3854 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003855 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003856 break;
3857 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003858
Calin Juravle98893e12015-10-02 21:05:03 +01003859 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003860 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003861 //
3862 // We cannot directly call the CheckCast runtime entry point
3863 // without resorting to a type checking slow path here (i.e. by
3864 // calling InvokeRuntime directly), as it would require to
3865 // assign fixed registers for the inputs of this HInstanceOf
3866 // instruction (following the runtime calling convention), which
3867 // might be cluttered by the potential first read barrier
3868 // emission at the beginning of this method.
3869 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003870 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003871 case TypeCheckKind::kInterfaceCheck: {
3872 // /* HeapReference<Class> */ temp = obj->klass_
3873 GenerateReferenceLoadTwoRegisters(instruction,
3874 temp_loc,
3875 obj_loc,
3876 class_offset,
3877 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003878 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003879
3880 // /* HeapReference<Class> */ temp = temp->iftable_
3881 GenerateReferenceLoadTwoRegisters(instruction,
3882 temp_loc,
3883 temp_loc,
3884 iftable_offset,
3885 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003886 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08003887 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003888 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08003889 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003890 vixl::aarch64::Label start_loop;
3891 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003892 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003893 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
3894 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003895 // Go to next interface.
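      // Each iftable entry is a pair of references (the interface class and its method array),
      // which is why the cursor advances by 2 * kHeapReferenceSize and the remaining length
      // counter is decremented by 2 per iteration.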
3896 __ Add(temp, temp, 2 * kHeapReferenceSize);
3897 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003898 // Compare the classes and continue the loop if they do not match.
3899 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
3900 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003901 break;
3902 }
Vladimir Marko175e7862018-03-27 09:03:13 +00003903
3904 case TypeCheckKind::kBitstringCheck: {
3905 // /* HeapReference<Class> */ temp = obj->klass_
3906 GenerateReferenceLoadTwoRegisters(instruction,
3907 temp_loc,
3908 obj_loc,
3909 class_offset,
3910 maybe_temp2_loc,
3911 kWithoutReadBarrier);
3912
3913 GenerateBitstringTypeCheckCompare(instruction, temp);
3914 __ B(ne, type_check_slow_path->GetEntryLabel());
3915 break;
3916 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003917 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003918 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003919
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003920 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003921}
3922
Alexandre Rames5319def2014-10-23 10:03:10 +01003923void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003924 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01003925 locations->SetOut(Location::ConstantLocation(constant));
3926}
3927
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003928void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003929 // Will be generated at use site.
3930}
3931
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003932void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003933 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003934 locations->SetOut(Location::ConstantLocation(constant));
3935}
3936
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003937void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003938 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00003939}
3940
Calin Juravle175dc732015-08-25 15:42:32 +01003941void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3942 // The trampoline uses the same calling convention as dex calling conventions,
3943 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3944 // the method_idx.
3945 HandleInvoke(invoke);
3946}
3947
3948void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3949 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01003950 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle175dc732015-08-25 15:42:32 +01003951}
3952
Alexandre Rames5319def2014-10-23 10:03:10 +01003953void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01003954 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01003955 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01003956}
3957
Alexandre Rames67555f72014-11-18 10:55:16 +00003958void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3959 HandleInvoke(invoke);
3960}
3961
3962void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
3963 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003964 LocationSummary* locations = invoke->GetLocations();
3965 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003966 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00003967 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07003968 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00003969
3970 // The register ip1 is required to be used for the hidden argument in
3971 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01003972 MacroAssembler* masm = GetVIXLAssembler();
3973 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00003974 scratch_scope.Exclude(ip1);
3975 __ Mov(ip1, invoke->GetDexMethodIndex());
3976
Artem Serov914d7a82017-02-07 14:33:49 +00003977 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
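  // (If VIXL dumped a literal pool in between, the PC recorded for the implicit null check
  // would no longer point at the faulting load.)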
Alexandre Rames67555f72014-11-18 10:55:16 +00003978 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07003979 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00003980 {
3981 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
3982 // /* HeapReference<Class> */ temp = temp->klass_
3983 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
3984 codegen_->MaybeRecordImplicitNullCheck(invoke);
3985 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003986 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00003987 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003988 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07003989 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00003990 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00003991 }
Artem Serov914d7a82017-02-07 14:33:49 +00003992
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003993 // Instead of simply (possibly) unpoisoning `temp` here, we should
3994 // emit a read barrier for the previous class reference load.
3995 // However this is not required in practice, as this is an
3996 // intermediate/temporary reference and because the current
3997 // concurrent copying collector keeps the from-space memory
3998 // intact/accessible until the end of the marking phase (the
3999 // concurrent copying collector may not in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004000 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004001 __ Ldr(temp,
4002 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4003 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004004 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004005 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004006 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004007 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004008 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004009
4010 {
4011 // Ensure the pc position is recorded immediately after the `blr` instruction.
4012 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4013
4014 // lr();
4015 __ blr(lr);
4016 DCHECK(!codegen_->IsLeafMethod());
4017 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4018 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004019
4020 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004021}
4022
4023void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004024 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004025 if (intrinsic.TryDispatch(invoke)) {
4026 return;
4027 }
4028
Alexandre Rames67555f72014-11-18 10:55:16 +00004029 HandleInvoke(invoke);
4030}
4031
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004032void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004033 // Explicit clinit checks triggered by static invokes must have been pruned by
4034 // art::PrepareForRegisterAllocation.
4035 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004036
Vladimir Markoca6fff82017-10-03 14:49:14 +01004037 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004038 if (intrinsic.TryDispatch(invoke)) {
4039 return;
4040 }
4041
Alexandre Rames67555f72014-11-18 10:55:16 +00004042 HandleInvoke(invoke);
4043}
4044
Andreas Gampe878d58c2015-01-15 23:24:00 -08004045static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4046 if (invoke->GetLocations()->Intrinsified()) {
4047 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4048 intrinsic.Dispatch(invoke);
4049 return true;
4050 }
4051 return false;
4052}
4053
Vladimir Markodc151b22015-10-15 18:02:30 +01004054HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4055 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004056 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004057 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004058 return desired_dispatch_info;
4059}
4060
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004061void CodeGeneratorARM64::GenerateStaticOrDirectCall(
4062 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004063 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004064 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4065 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004066 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4067 uint32_t offset =
4068 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004069 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004070 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004071 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004072 }
Vladimir Marko58155012015-08-19 12:49:41 +00004073 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004074 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004075 break;
Vladimir Marko65979462017-05-19 17:25:12 +01004076 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
4077 DCHECK(GetCompilerOptions().IsBootImage());
4078 // Add ADRP with its PC-relative method patch.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004079 vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
Vladimir Marko65979462017-05-19 17:25:12 +01004080 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4081 // Add ADD with its PC-relative method patch.
4082 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004083 NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
Vladimir Marko65979462017-05-19 17:25:12 +01004084 EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
4085 break;
4086 }
Vladimir Markob066d432018-01-03 13:14:37 +00004087 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
4088 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004089 uint32_t boot_image_offset = GetBootImageOffset(invoke);
Vladimir Markob066d432018-01-03 13:14:37 +00004090 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
4091 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4092 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
4093 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
4094 // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
4095 EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
4096 break;
4097 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004098 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Vladimir Markob066d432018-01-03 13:14:37 +00004099 // Add ADRP with its PC-relative .bss entry patch.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004100 MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
4101 vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004102 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Markob066d432018-01-03 13:14:37 +00004103 // Add LDR with its PC-relative .bss entry patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004104 vixl::aarch64::Label* ldr_label =
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004105 NewMethodBssEntryPatch(target_method, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004106 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004107 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004108 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004109 case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
4110 // Load method address from literal pool.
4111 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
4112 break;
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004113 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
4114 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
4115 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko58155012015-08-19 12:49:41 +00004116 }
4117 }
4118
4119 switch (invoke->GetCodePtrLocation()) {
4120 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004121 {
4122 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
4123 ExactAssemblyScope eas(GetVIXLAssembler(),
4124 kInstructionSize,
4125 CodeBufferCheckScope::kExactSize);
4126 __ bl(&frame_entry_label_);
4127 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
4128 }
Vladimir Marko58155012015-08-19 12:49:41 +00004129 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004130 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4131 // LR = callee_method->entry_point_from_quick_compiled_code_;
4132 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004133 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004134 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004135 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004136 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004137 ExactAssemblyScope eas(GetVIXLAssembler(),
4138 kInstructionSize,
4139 CodeBufferCheckScope::kExactSize);
4140 // lr()
4141 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004142 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004143 }
Vladimir Marko58155012015-08-19 12:49:41 +00004144 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004145 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004146
Andreas Gampe878d58c2015-01-15 23:24:00 -08004147 DCHECK(!IsLeafMethod());
4148}
4149
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004150void CodeGeneratorARM64::GenerateVirtualCall(
4151 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004152 // Use the calling convention instead of the location of the receiver, as
4153 // intrinsics may have put the receiver in a different register. In the intrinsics
4154 // slow path, the arguments have been moved to the right place, so here we are
4155 // guaranteed that the receiver is the first register of the calling convention.
4156 InvokeDexCallingConvention calling_convention;
4157 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004158 Register temp = XRegisterFrom(temp_in);
4159 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4160 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4161 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004162 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004163
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004164 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004165
4166 {
4167 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
4168 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4169 // /* HeapReference<Class> */ temp = receiver->klass_
4170 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4171 MaybeRecordImplicitNullCheck(invoke);
4172 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004173 // Instead of simply (possibly) unpoisoning `temp` here, we should
4174 // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004175 // intermediate/temporary reference and because the current
4176 // concurrent copying collector keeps the from-space memory
4177 // intact/accessible until the end of the marking phase (the
4178 // concurrent copying collector may not in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004179 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4180 // temp = temp->GetMethodAt(method_offset);
4181 __ Ldr(temp, MemOperand(temp, method_offset));
4182 // lr = temp->GetEntryPoint();
4183 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004184 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004185 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004186 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4187 // lr();
4188 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004189 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004190 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004191}
4192
Orion Hodsonac141392017-01-13 11:53:47 +00004193void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4194 HandleInvoke(invoke);
4195}
4196
4197void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4198 codegen_->GenerateInvokePolymorphicCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004199 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Orion Hodsonac141392017-01-13 11:53:47 +00004200}
4201
Orion Hodson4c8e12e2018-05-18 08:33:20 +01004202void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4203 HandleInvoke(invoke);
4204}
4205
4206void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4207 codegen_->GenerateInvokeCustomCall(invoke);
4208 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
4209}
4210
Vladimir Marko6fd16062018-06-26 11:02:04 +01004211vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
4212 uint32_t intrinsic_data,
4213 vixl::aarch64::Label* adrp_label) {
4214 return NewPcRelativePatch(
4215 /* dex_file */ nullptr, intrinsic_data, adrp_label, &boot_image_intrinsic_patches_);
4216}
4217
Vladimir Markob066d432018-01-03 13:14:37 +00004218vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
4219 uint32_t boot_image_offset,
4220 vixl::aarch64::Label* adrp_label) {
4221 return NewPcRelativePatch(
4222 /* dex_file */ nullptr, boot_image_offset, adrp_label, &boot_image_method_patches_);
4223}
4224
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004225vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004226 MethodReference target_method,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004227 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004228 return NewPcRelativePatch(
4229 target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004230}
4231
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004232vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
4233 MethodReference target_method,
4234 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004235 return NewPcRelativePatch(
4236 target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004237}
4238
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004239vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
Scott Wakeling97c72b72016-06-24 16:19:36 +01004240 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004241 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004242 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004243 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004244}
4245
Vladimir Marko1998cd02017-01-13 13:02:58 +00004246vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4247 const DexFile& dex_file,
4248 dex::TypeIndex type_index,
4249 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004250 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004251}
4252
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004253vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004254 const DexFile& dex_file,
4255 dex::StringIndex string_index,
4256 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004257 return NewPcRelativePatch(
4258 &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01004259}
4260
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004261vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
4262 const DexFile& dex_file,
4263 dex::StringIndex string_index,
4264 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004265 return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004266}
4267
Vladimir Marko966b46f2018-08-03 10:20:19 +00004268void CodeGeneratorARM64::EmitBakerReadBarrierCbnz(uint32_t custom_data) {
Vladimir Marko94796f82018-08-08 15:15:33 +01004269 DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope.
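  // `mr` is the marking register: it is non-zero only while the GC is marking, so the CBNZ
  // emitted below branches to the Baker read barrier slow path (a JIT-compiled stub shared via
  // `jit_baker_read_barrier_slow_paths_`, or a thunk reached through a link-time patch) only
  // when a read barrier may actually be required.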
Vladimir Marko966b46f2018-08-03 10:20:19 +00004270 if (Runtime::Current()->UseJitCompilation()) {
4271 auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
4272 vixl::aarch64::Label* slow_path_entry = &it->second.label;
4273 __ cbnz(mr, slow_path_entry);
4274 } else {
4275 baker_read_barrier_patches_.emplace_back(custom_data);
4276 vixl::aarch64::Label* cbnz_label = &baker_read_barrier_patches_.back().label;
4277 __ bind(cbnz_label);
4278 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time.
4279 }
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004280}
4281
Scott Wakeling97c72b72016-06-24 16:19:36 +01004282vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004283 const DexFile* dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004284 uint32_t offset_or_index,
4285 vixl::aarch64::Label* adrp_label,
4286 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004287 // Add a patch entry and return the label.
4288 patches->emplace_back(dex_file, offset_or_index);
4289 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004290 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004291 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4292 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4293 return label;
4294}
4295
Scott Wakeling97c72b72016-06-24 16:19:36 +01004296vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4297 uint64_t address) {
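  // Boot image objects live in the low 4GiB (see the note in GenerateStaticOrDirectCall), so the
  // 64-bit address can be narrowed to a 32-bit literal; dchecked_integral_cast verifies that no
  // bits are lost.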
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004298 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004299}
4300
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004301vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004302 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004303 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004304 return jit_string_patches_.GetOrCreate(
4305 StringReference(&dex_file, string_index),
4306 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4307}
4308
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004309vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004310 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004311 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004312 return jit_class_patches_.GetOrCreate(
4313 TypeReference(&dex_file, type_index),
4314 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4315}
4316
Vladimir Markoaad75c62016-10-03 08:46:48 +00004317void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4318 vixl::aarch64::Register reg) {
4319 DCHECK(reg.IsX());
4320 SingleEmissionCheckScope guard(GetVIXLAssembler());
4321 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004322 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004323}
4324
4325void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4326 vixl::aarch64::Register out,
4327 vixl::aarch64::Register base) {
4328 DCHECK(out.IsX());
4329 DCHECK(base.IsX());
4330 SingleEmissionCheckScope guard(GetVIXLAssembler());
4331 __ Bind(fixup_label);
4332 __ add(out, base, Operand(/* offset placeholder */ 0));
4333}
4334
4335void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4336 vixl::aarch64::Register out,
4337 vixl::aarch64::Register base) {
4338 DCHECK(base.IsX());
4339 SingleEmissionCheckScope guard(GetVIXLAssembler());
4340 __ Bind(fixup_label);
4341 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4342}
4343
Vladimir Markoeebb8212018-06-05 14:57:24 +01004344void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01004345 uint32_t boot_image_reference) {
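  // Depending on the compilation mode, `boot_image_reference` is either patch data that the
  // linker resolves (boot image and AOT app compiles) or, under the JIT, a plain offset from the
  // start of the first boot image space (see the last branch below).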
4346 if (GetCompilerOptions().IsBootImage()) {
4347 // Add ADRP with its PC-relative intrinsic patch.
4348 vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
4349 EmitAdrpPlaceholder(adrp_label, reg.X());
4350 // Add ADD with its PC-relative intrinsic patch.
4351 vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
4352 EmitAddPlaceholder(add_label, reg.X(), reg.X());
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004353 } else if (Runtime::Current()->IsAotCompiler()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01004354 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004355 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004356 EmitAdrpPlaceholder(adrp_label, reg.X());
4357 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004358 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004359 EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
4360 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004361 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01004362 gc::Heap* heap = Runtime::Current()->GetHeap();
4363 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004364 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01004365 __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
4366 }
4367}
4368
Vladimir Marko6fd16062018-06-26 11:02:04 +01004369void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
4370 uint32_t boot_image_offset) {
4371 DCHECK(invoke->IsStatic());
4372 InvokeRuntimeCallingConvention calling_convention;
4373 Register argument = calling_convention.GetRegisterAt(0);
4374 if (GetCompilerOptions().IsBootImage()) {
4375 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
4376 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
4377 MethodReference target_method = invoke->GetTargetMethod();
4378 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
4379 // Add ADRP with its PC-relative type patch.
4380 vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
4381 EmitAdrpPlaceholder(adrp_label, argument.X());
4382 // Add ADD with its PC-relative type patch.
4383 vixl::aarch64::Label* add_label =
4384 NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
4385 EmitAddPlaceholder(add_label, argument.X(), argument.X());
4386 } else {
4387 LoadBootImageAddress(argument, boot_image_offset);
4388 }
4389 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
4390 CheckEntrypointTypes<kQuickAllocObjectInitialized, void*, mirror::Class*>();
4391}
4392
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004393template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00004394inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4395 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004396 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004397 for (const PcRelativePatchInfo& info : infos) {
4398 linker_patches->push_back(Factory(info.label.GetLocation(),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004399 info.target_dex_file,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004400 info.pc_insn_label->GetLocation(),
4401 info.offset_or_index));
4402 }
4403}
4404
Vladimir Marko6fd16062018-06-26 11:02:04 +01004405template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
4406linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
4407 const DexFile* target_dex_file,
4408 uint32_t pc_insn_offset,
4409 uint32_t boot_image_offset) {
4410 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
4411 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00004412}
4413
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004414void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00004415 DCHECK(linker_patches->empty());
4416 size_t size =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004417 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004418 method_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004419 boot_image_type_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004420 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004421 boot_image_string_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004422 string_bss_entry_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01004423 boot_image_intrinsic_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004424 baker_read_barrier_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004425 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01004426 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004427 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004428 boot_image_method_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004429 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004430 boot_image_type_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004431 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004432 boot_image_string_patches_, linker_patches);
Vladimir Marko6fd16062018-06-26 11:02:04 +01004433 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
4434 boot_image_intrinsic_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01004435 } else {
Vladimir Marko6fd16062018-06-26 11:02:04 +01004436 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
Vladimir Markob066d432018-01-03 13:14:37 +00004437 boot_image_method_patches_, linker_patches);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004438 DCHECK(boot_image_type_patches_.empty());
4439 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004440 DCHECK(boot_image_intrinsic_patches_.empty());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004441 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004442 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
4443 method_bss_entry_patches_, linker_patches);
4444 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
4445 type_bss_entry_patches_, linker_patches);
4446 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
4447 string_bss_entry_patches_, linker_patches);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004448 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004449 linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
4450 info.label.GetLocation(), info.custom_data));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004451 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004452 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004453}
4454
Vladimir Markoca1e0382018-04-11 09:58:41 +00004455bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
4456 return patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
4457 patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
4458}
4459
4460void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
4461 /*out*/ ArenaVector<uint8_t>* code,
4462 /*out*/ std::string* debug_name) {
4463 Arm64Assembler assembler(GetGraph()->GetAllocator());
4464 switch (patch.GetType()) {
4465 case linker::LinkerPatch::Type::kCallRelative: {
4466 // The thunk just uses the entry point in the ArtMethod. This works even for calls
4467 // to the generic JNI and interpreter trampolines.
4468 Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
4469 kArm64PointerSize).Int32Value());
4470 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
4471 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
4472 *debug_name = "MethodCallThunk";
4473 }
4474 break;
4475 }
4476 case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
4477 DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
4478 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
4479 break;
4480 }
4481 default:
4482 LOG(FATAL) << "Unexpected patch type " << patch.GetType();
4483 UNREACHABLE();
4484 }
4485
4486 // Ensure we emit the literal pool if any.
4487 assembler.FinalizeCode();
4488 code->resize(assembler.CodeSize());
4489 MemoryRegion code_region(code->data(), code->size());
4490 assembler.FinalizeInstructions(code_region);
4491}
4492
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004493vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
4494 return uint32_literals_.GetOrCreate(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004495 value,
4496 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4497}
4498
Scott Wakeling97c72b72016-06-24 16:19:36 +01004499vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004500 return uint64_literals_.GetOrCreate(
4501 value,
4502 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004503}
4504
Andreas Gampe878d58c2015-01-15 23:24:00 -08004505void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004506 // Explicit clinit checks triggered by static invokes must have been pruned by
4507 // art::PrepareForRegisterAllocation.
4508 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004509
Andreas Gampe878d58c2015-01-15 23:24:00 -08004510 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004511 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004512 return;
4513 }
4514
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004515 {
4516 // Ensure that between the BLR (emitted by GenerateStaticOrDirectCall) and RecordPcInfo there
4517 // are no pools emitted.
4518 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4519 LocationSummary* locations = invoke->GetLocations();
4520 codegen_->GenerateStaticOrDirectCall(
4521 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
4522 }
4523
4524 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004525}
4526
4527void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004528 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004529 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004530 return;
4531 }
4532
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004533 {
4534 // Ensure that between the BLR (emitted by GenerateVirtualCall) and RecordPcInfo there
4535 // are no pools emitted.
4536 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4537 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
4538 DCHECK(!codegen_->IsLeafMethod());
4539 }
4540
4541 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004542}
4543
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004544HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4545 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004546 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004547 case HLoadClass::LoadKind::kInvalid:
4548 LOG(FATAL) << "UNREACHABLE";
4549 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004550 case HLoadClass::LoadKind::kReferrersClass:
4551 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004552 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004553 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004554 case HLoadClass::LoadKind::kBssEntry:
4555 DCHECK(!Runtime::Current()->UseJitCompilation());
4556 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004557 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004558 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004559 DCHECK(Runtime::Current()->UseJitCompilation());
4560 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004561 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004562 break;
4563 }
4564 return desired_class_load_kind;
4565}
4566
Alexandre Rames67555f72014-11-18 10:55:16 +00004567void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004568 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004569 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004570 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004571 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004572 cls,
4573 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004574 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00004575 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004576 return;
4577 }
Vladimir Marko41559982017-01-06 14:04:23 +00004578 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004579
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004580 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4581 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004582 ? LocationSummary::kCallOnSlowPath
4583 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01004584 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004585 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004586 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004587 }
4588
Vladimir Marko41559982017-01-06 14:04:23 +00004589 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004590 locations->SetInAt(0, Location::RequiresRegister());
4591 }
4592 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004593 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
4594 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4595 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01004596 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004597 } else {
4598 // For non-Baker read barrier we have a temp-clobbering call.
4599 }
4600 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004601}
4602
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004603// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4604// move.
4605void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004606 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004607 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00004608 codegen_->GenerateLoadClassRuntimeCall(cls);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004609 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle580b6092015-10-06 17:35:58 +01004610 return;
4611 }
Vladimir Marko41559982017-01-06 14:04:23 +00004612 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004613
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004614 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004615 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004616
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004617 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4618 ? kWithoutReadBarrier
4619 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004620 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004621 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004622 case HLoadClass::LoadKind::kReferrersClass: {
4623 DCHECK(!cls->CanCallRuntime());
4624 DCHECK(!cls->MustGenerateClinitCheck());
4625 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4626 Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00004627 codegen_->GenerateGcRootFieldLoad(cls,
4628 out_loc,
4629 current_method,
4630 ArtMethod::DeclaringClassOffset().Int32Value(),
4631 /* fixup_label */ nullptr,
4632 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004633 break;
4634 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004635 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004636 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004637 // Add ADRP with its PC-relative type patch.
4638 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004639 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004640 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004641 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004642 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004643 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004644 codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004645 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004646 break;
4647 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004648 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004649 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004650 uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
4651 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
4652 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004653 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004654 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004655 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004656 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004657 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004658 break;
4659 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004660 case HLoadClass::LoadKind::kBssEntry: {
4661 // Add ADRP with its PC-relative Class .bss entry patch.
4662 const DexFile& dex_file = cls->GetDexFile();
4663 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markof3c52b42017-11-17 17:32:12 +00004664 vixl::aarch64::Register temp = XRegisterFrom(out_loc);
4665 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
4666 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004667 // Add LDR with its PC-relative Class .bss entry patch.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004668 vixl::aarch64::Label* ldr_label =
Vladimir Markof3c52b42017-11-17 17:32:12 +00004669 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004670 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00004671 codegen_->GenerateGcRootFieldLoad(cls,
4672 out_loc,
4673 temp,
4674 /* offset placeholder */ 0u,
4675 ldr_label,
4676 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004677 generate_null_check = true;
4678 break;
4679 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004680 case HLoadClass::LoadKind::kJitBootImageAddress: {
4681 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
4682 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
4683 DCHECK_NE(address, 0u);
4684 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
4685 break;
4686 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004687 case HLoadClass::LoadKind::kJitTableAddress: {
4688 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4689 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004690 cls->GetClass()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00004691 codegen_->GenerateGcRootFieldLoad(cls,
4692 out_loc,
4693 out.X(),
4694 /* offset */ 0,
4695 /* fixup_label */ nullptr,
4696 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004697 break;
4698 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004699 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004700 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00004701 LOG(FATAL) << "UNREACHABLE";
4702 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004703 }
4704
Vladimir Markoea4c1262017-02-06 19:59:33 +00004705 bool do_clinit = cls->MustGenerateClinitCheck();
4706 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004707 DCHECK(cls->CanCallRuntime());
Vladimir Markoa9f303c2018-07-20 16:43:56 +01004708 SlowPathCodeARM64* slow_path =
4709 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(cls, cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004710 codegen_->AddSlowPath(slow_path);
4711 if (generate_null_check) {
4712 __ Cbz(out, slow_path->GetEntryLabel());
4713 }
4714 if (cls->MustGenerateClinitCheck()) {
4715 GenerateClassInitializationCheck(slow_path, out);
4716 } else {
4717 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004718 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004719 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004720 }
4721}
4722
Orion Hodsondbaa5c72018-05-10 08:22:46 +01004723void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
4724 InvokeRuntimeCallingConvention calling_convention;
4725 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
4726 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
4727}
4728
4729void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
4730 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
4731}
4732
Orion Hodson18259d72018-04-12 11:18:23 +01004733void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
4734 InvokeRuntimeCallingConvention calling_convention;
4735 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
4736 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
4737}
4738
4739void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
4740 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
4741}
4742
David Brazdilcb1c0552015-08-04 16:22:25 +01004743static MemOperand GetExceptionTlsAddress() {
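  // The pending exception is a field of the current Thread, addressed off the reserved thread
  // register `tr`; VisitLoadException reads it and VisitClearException stores wzr (null) to it.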
Andreas Gampe542451c2016-07-26 09:02:02 -07004744 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004745}
4746
Alexandre Rames67555f72014-11-18 10:55:16 +00004747void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4748 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004749 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexandre Rames67555f72014-11-18 10:55:16 +00004750 locations->SetOut(Location::RequiresRegister());
4751}
4752
4753void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004754 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4755}
4756
4757void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004758 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01004759}
4760
4761void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4762 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004763}
4764
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004765HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4766 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004767 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004768 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004769 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00004770 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01004771 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004772 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004773 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004774 case HLoadString::LoadKind::kJitTableAddress:
4775 DCHECK(Runtime::Current()->UseJitCompilation());
4776 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004777 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004778 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004779 }
4780 return desired_string_load_kind;
4781}
4782
Alexandre Rames67555f72014-11-18 10:55:16 +00004783void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004784 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01004785 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004786 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004787 InvokeRuntimeCallingConvention calling_convention;
4788 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4789 } else {
4790 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004791 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
4792 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00004793 // Rely on the pResolveString entrypoint and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01004794 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004795 } else {
4796 // For non-Baker read barrier we have a temp-clobbering call.
4797 }
4798 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004799 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004800}
4801
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004802// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4803// move.
4804void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00004805 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004806 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004807
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004808 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004809 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004810 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004811 // Add ADRP with its PC-relative String patch.
4812 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004813 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004814 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004815 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004816 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004817 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004818 codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004819 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004820 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004821 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004822 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004823 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004824 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
4825 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
4826 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004827 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004828 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004829 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004830 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004831 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
4832 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004833 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004834 case HLoadString::LoadKind::kBssEntry: {
4835 // Add ADRP with its PC-relative String .bss entry patch.
4836 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004837 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004838 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markof3c52b42017-11-17 17:32:12 +00004839 Register temp = XRegisterFrom(out_loc);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004840 vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004841 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004842 // Add LDR with its PC-relative String .bss entry patch.
Vladimir Markoaad75c62016-10-03 08:46:48 +00004843 vixl::aarch64::Label* ldr_label =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004844 codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004845 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00004846 codegen_->GenerateGcRootFieldLoad(load,
4847 out_loc,
4848 temp,
4849 /* offset placeholder */ 0u,
4850 ldr_label,
4851 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004852 SlowPathCodeARM64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00004853 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004854 codegen_->AddSlowPath(slow_path);
4855 __ Cbz(out.X(), slow_path->GetEntryLabel());
4856 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004857 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004858 return;
4859 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004860 case HLoadString::LoadKind::kJitBootImageAddress: {
4861 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
4862 DCHECK_NE(address, 0u);
4863 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
4864 return;
4865 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004866 case HLoadString::LoadKind::kJitTableAddress: {
4867 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004868 load->GetStringIndex(),
4869 load->GetString()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00004870 codegen_->GenerateGcRootFieldLoad(load,
4871 out_loc,
4872 out.X(),
4873 /* offset */ 0,
4874 /* fixup_label */ nullptr,
4875 kCompilerReadBarrierOption);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004876 return;
4877 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004878 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004879 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004880 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004881
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004882 // TODO: Re-add the compiler code to do the string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004883 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004884 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004885 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004886 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4887 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004888 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004889}
4890
Alexandre Rames5319def2014-10-23 10:03:10 +01004891void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004892 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01004893 locations->SetOut(Location::ConstantLocation(constant));
4894}
4895
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004896void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004897 // Will be generated at use site.
4898}
4899
Alexandre Rames67555f72014-11-18 10:55:16 +00004900void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004901 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4902 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004903 InvokeRuntimeCallingConvention calling_convention;
4904 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4905}
4906
4907void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004908 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004909 instruction,
4910 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004911 if (instruction->IsEnter()) {
4912 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4913 } else {
4914 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4915 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004916 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004917}
4918
Alexandre Rames42d641b2014-10-27 14:00:51 +00004919void LocationsBuilderARM64::VisitMul(HMul* mul) {
4920 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004921 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004922 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004923 case DataType::Type::kInt32:
4924 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00004925 locations->SetInAt(0, Location::RequiresRegister());
4926 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004927 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004928 break;
4929
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004930 case DataType::Type::kFloat32:
4931 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004932 locations->SetInAt(0, Location::RequiresFpuRegister());
4933 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004934 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004935 break;
4936
4937 default:
4938 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4939 }
4940}
4941
4942void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4943 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004944 case DataType::Type::kInt32:
4945 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00004946 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4947 break;
4948
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004949 case DataType::Type::kFloat32:
4950 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004951 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004952 break;
4953
4954 default:
4955 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4956 }
4957}
4958
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004959void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4960 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004961 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004962 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004963 case DataType::Type::kInt32:
4964 case DataType::Type::kInt64:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004965 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004966 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004967 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004968
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004969 case DataType::Type::kFloat32:
4970 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00004971 locations->SetInAt(0, Location::RequiresFpuRegister());
4972 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004973 break;
4974
4975 default:
4976 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4977 }
4978}
4979
4980void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4981 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004982 case DataType::Type::kInt32:
4983 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004984 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4985 break;
4986
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004987 case DataType::Type::kFloat32:
4988 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00004989 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004990 break;
4991
4992 default:
4993 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4994 }
4995}
4996
4997void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004998 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
4999 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005000 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005001 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005002 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5003 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005004}
5005
5006void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005007 // Note: if heap poisoning is enabled, the entry point takes care
 5008 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00005009 QuickEntrypointEnum entrypoint =
5010 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
5011 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005012 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005013 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005014}
5015
Alexandre Rames5319def2014-10-23 10:03:10 +01005016void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005017 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5018 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01005019 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07005020 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005021 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexandre Rames5319def2014-10-23 10:03:10 +01005022}
5023
5024void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005025 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5026 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005027 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005028}
5029
5030void LocationsBuilderARM64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005031 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005032 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005033 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005034}
5035
5036void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005037 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005038 case DataType::Type::kInt32:
5039 case DataType::Type::kInt64:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005040 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005041 break;
5042
5043 default:
5044 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5045 }
5046}
5047
David Brazdil66d126e2015-04-03 16:02:44 +01005048void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005049 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
David Brazdil66d126e2015-04-03 16:02:44 +01005050 locations->SetInAt(0, Location::RequiresRegister());
5051 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5052}
5053
5054void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
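  // Equivalent to out = in ^ 1: the input is a boolean that is expected to be 0 or 1 here,
  // so xoring with 1 flips it (0 -> 1, 1 -> 0) without needing a compare.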
Scott Wakeling97c72b72016-06-24 16:19:36 +01005055 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005056}
5057
Alexandre Rames5319def2014-10-23 10:03:10 +01005058void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005059 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5060 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005061}
5062
Calin Juravle2ae48182016-03-16 14:05:09 +00005063void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5064 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005065 return;
5066 }
Artem Serov914d7a82017-02-07 14:33:49 +00005067 {
5068 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
5069 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5070 Location obj = instruction->GetLocations()->InAt(0);
5071 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
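    // The destination is wzr, so the loaded value is discarded; the point of the access is
    // that it faults when `obj` is null and, as for ART implicit null checks in general, the
    // fault handler then uses the PC recorded below to raise a NullPointerException.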
5072 RecordPcInfo(instruction, instruction->GetDexPc());
5073 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005074}
5075
Calin Juravle2ae48182016-03-16 14:05:09 +00005076void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005077 SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005078 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005079
5080 LocationSummary* locations = instruction->GetLocations();
5081 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005082
5083 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005084}
5085
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005086void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005087 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005088}
5089
Alexandre Rames67555f72014-11-18 10:55:16 +00005090void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5091 HandleBinaryOp(instruction);
5092}
5093
5094void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5095 HandleBinaryOp(instruction);
5096}
5097
Alexandre Rames3e69f162014-12-10 10:36:50 +00005098void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5099 LOG(FATAL) << "Unreachable";
5100}
5101
5102void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005103 if (instruction->GetNext()->IsSuspendCheck() &&
5104 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5105 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5106 // The back edge will generate the suspend check.
5107 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5108 }
5109
Alexandre Rames3e69f162014-12-10 10:36:50 +00005110 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5111}
5112
Alexandre Rames5319def2014-10-23 10:03:10 +01005113void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005114 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005115 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5116 if (location.IsStackSlot()) {
5117 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5118 } else if (location.IsDoubleStackSlot()) {
5119 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5120 }
5121 locations->SetOut(location);
5122}
5123
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005124void InstructionCodeGeneratorARM64::VisitParameterValue(
5125 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005126 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005127}
5128
5129void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5130 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005131 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005132 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005133}
5134
5135void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5136 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5137 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005138}
5139
5140void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005141 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005142 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005143 locations->SetInAt(i, Location::Any());
5144 }
5145 locations->SetOut(Location::Any());
5146}
5147
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005148void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005149 LOG(FATAL) << "Unreachable";
5150}
5151
Serban Constantinescu02164b32014-11-13 14:05:07 +00005152void LocationsBuilderARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005153 DataType::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005154 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005155 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005156 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005157 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005158
5159 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005160 case DataType::Type::kInt32:
5161 case DataType::Type::kInt64:
Serban Constantinescu02164b32014-11-13 14:05:07 +00005162 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005163 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005164 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5165 break;
5166
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005167 case DataType::Type::kFloat32:
5168 case DataType::Type::kFloat64: {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005169 InvokeRuntimeCallingConvention calling_convention;
5170 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5171 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5172 locations->SetOut(calling_convention.GetReturnLocation(type));
5173
5174 break;
5175 }
5176
Serban Constantinescu02164b32014-11-13 14:05:07 +00005177 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005178 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005179 }
5180}
5181
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005182void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005183 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005184 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
5185 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
5186
5187 Register out = OutputRegister(instruction);
5188 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005189
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005190 if (abs_imm == 2) {
5191 __ Cmp(dividend, 0);
5192 __ And(out, dividend, 1);
5193 __ Csneg(out, out, out, ge);
5194 } else {
5195 UseScratchRegisterScope temps(GetVIXLAssembler());
5196 Register temp = temps.AcquireSameSizeAs(out);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005197
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005198 __ Negs(temp, dividend);
5199 __ And(out, dividend, abs_imm - 1);
5200 __ And(temp, temp, abs_imm - 1);
5201 __ Csneg(out, out, temp, mi);
5202 }
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005203}
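
// Illustrative sketch only (not referenced by the code generator): a C++ model of the
// Negs/And/Csneg sequence emitted above, written with unsigned arithmetic so that the
// INT32_MIN case stays well defined. The helper name is made up for this sketch.
static int32_t ModelRemForPowerOfTwo(int32_t dividend, uint32_t abs_imm) {
  uint32_t mask = abs_imm - 1u;                                   // abs_imm is a power of two
  uint32_t pos = static_cast<uint32_t>(dividend) & mask;          // And  out, dividend, mask
  uint32_t neg = (0u - static_cast<uint32_t>(dividend)) & mask;   // Negs temp; And temp, mask
  // Csneg keeps the masked dividend for a non-negative dividend and negates the masked
  // value of -dividend otherwise, matching Java's sign-of-dividend remainder.
  return (dividend >= 0) ? static_cast<int32_t>(pos) : -static_cast<int32_t>(neg);
}
// e.g. ModelRemForPowerOfTwo(-7, 4) == -3 and ModelRemForPowerOfTwo(7, 4) == 3.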
5204
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005205void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005206 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005207
5208 if (imm == 0) {
5209 // Do not generate anything.
 5210 // DivZeroCheck would prevent any code from being executed.
5211 return;
5212 }
5213
Evgeny Astigeevichf58dc652018-06-25 17:54:07 +01005214 if (IsPowerOfTwo(AbsOrMin(imm))) {
5215 // Cases imm == -1 or imm == 1 are handled in constant folding by
5216 // InstructionWithAbsorbingInputSimplifier.
 5217 // If those cases have survived until code generation, they are handled in
 5218 // GenerateIntRemForPower2Denom because the absolute value of -1 and 1 is a power of 2 (2^0).
 5219 // The correct code is generated for them, it just takes a few more instructions.
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005220 GenerateIntRemForPower2Denom(instruction);
5221 } else {
5222 DCHECK(imm < -2 || imm > 2) << imm;
5223 GenerateDivRemWithAnyConstant(instruction);
5224 }
5225}
5226
5227void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
5228 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
5229 << instruction->GetResultType();
5230
5231 if (instruction->GetLocations()->InAt(1).IsConstant()) {
5232 GenerateIntRemForConstDenom(instruction);
5233 } else {
5234 Register out = OutputRegister(instruction);
5235 Register dividend = InputRegisterAt(instruction, 0);
5236 Register divisor = InputRegisterAt(instruction, 1);
5237 UseScratchRegisterScope temps(GetVIXLAssembler());
5238 Register temp = temps.AcquireSameSizeAs(out);
5239 __ Sdiv(temp, dividend, divisor);
5240 __ Msub(out, temp, divisor, dividend);
5241 }
5242}
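
// Illustrative sketch only (not referenced by the code generator): the Sdiv/Msub pair above
// computes out = dividend - (dividend / divisor) * divisor, i.e. the Java remainder. The
// sketch assumes divisor != 0 (guaranteed by DivZeroCheck) and ignores the INT64_MIN % -1
// corner case, which is UB for C++ '/' but which Sdiv/Msub resolve to 0 as Java requires.
static int64_t ModelRemWithRegisterDivisor(int64_t dividend, int64_t divisor) {
  int64_t quotient = dividend / divisor;   // Sdiv temp, dividend, divisor
  return dividend - quotient * divisor;    // Msub out, temp, divisor, dividend
}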
5243
Serban Constantinescu02164b32014-11-13 14:05:07 +00005244void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005245 DataType::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005246
Serban Constantinescu02164b32014-11-13 14:05:07 +00005247 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005248 case DataType::Type::kInt32:
5249 case DataType::Type::kInt64: {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005250 GenerateIntRem(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005251 break;
5252 }
5253
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005254 case DataType::Type::kFloat32:
5255 case DataType::Type::kFloat64: {
5256 QuickEntrypointEnum entrypoint =
5257 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005258 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005259 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00005260 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5261 } else {
5262 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5263 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005264 break;
5265 }
5266
Serban Constantinescu02164b32014-11-13 14:05:07 +00005267 default:
5268 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005269 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005270 }
5271}
5272
Aart Bik1f8d51b2018-02-15 10:42:37 -08005273void LocationsBuilderARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005274 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005275}
5276
Aart Bik1f8d51b2018-02-15 10:42:37 -08005277void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005278 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005279}
5280
5281void LocationsBuilderARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005282 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005283}
5284
5285void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005286 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005287}
5288
Aart Bik3dad3412018-02-28 12:01:46 -08005289void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
5290 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
5291 switch (abs->GetResultType()) {
5292 case DataType::Type::kInt32:
5293 case DataType::Type::kInt64:
5294 locations->SetInAt(0, Location::RequiresRegister());
5295 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5296 break;
5297 case DataType::Type::kFloat32:
5298 case DataType::Type::kFloat64:
5299 locations->SetInAt(0, Location::RequiresFpuRegister());
5300 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5301 break;
5302 default:
5303 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5304 }
5305}
5306
5307void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
5308 switch (abs->GetResultType()) {
5309 case DataType::Type::kInt32:
5310 case DataType::Type::kInt64: {
5311 Register in_reg = InputRegisterAt(abs, 0);
5312 Register out_reg = OutputRegister(abs);
5313 __ Cmp(in_reg, Operand(0));
5314 __ Cneg(out_reg, in_reg, lt);
5315 break;
5316 }
5317 case DataType::Type::kFloat32:
5318 case DataType::Type::kFloat64: {
5319 FPRegister in_reg = InputFPRegisterAt(abs, 0);
5320 FPRegister out_reg = OutputFPRegister(abs);
5321 __ Fabs(out_reg, in_reg);
5322 break;
5323 }
5324 default:
5325 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5326 }
5327}
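
// Illustrative sketch only: the integer path above (Cmp + Cneg) is a conditional negate.
// A minimal C++ model of it; as with Java's Math.abs, INT32_MIN maps to itself because
// the negation wraps around.
static int32_t ModelIntAbs(int32_t in) {
  uint32_t negated = 0u - static_cast<uint32_t>(in);      // Cneg candidate value
  return (in < 0) ? static_cast<int32_t>(negated) : in;   // selected when lt after Cmp in, #0
}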
5328
Igor Murashkind01745e2017-04-05 16:40:31 -07005329void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
5330 constructor_fence->SetLocations(nullptr);
5331}
5332
5333void InstructionCodeGeneratorARM64::VisitConstructorFence(
5334 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
5335 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
5336}
5337
Calin Juravle27df7582015-04-17 19:12:31 +01005338void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5339 memory_barrier->SetLocations(nullptr);
5340}
5341
5342void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005343 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005344}
5345
Alexandre Rames5319def2014-10-23 10:03:10 +01005346void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005347 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005348 DataType::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005349 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005350}
5351
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005352void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005353 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005354}
5355
5356void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5357 instruction->SetLocations(nullptr);
5358}
5359
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005360void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005361 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005362}
5363
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005364void LocationsBuilderARM64::VisitRor(HRor* ror) {
5365 HandleBinaryOp(ror);
5366}
5367
5368void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5369 HandleBinaryOp(ror);
5370}
5371
Serban Constantinescu02164b32014-11-13 14:05:07 +00005372void LocationsBuilderARM64::VisitShl(HShl* shl) {
5373 HandleShift(shl);
5374}
5375
5376void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5377 HandleShift(shl);
5378}
5379
5380void LocationsBuilderARM64::VisitShr(HShr* shr) {
5381 HandleShift(shr);
5382}
5383
5384void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5385 HandleShift(shr);
5386}
5387
Alexandre Rames5319def2014-10-23 10:03:10 +01005388void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005389 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005390}
5391
5392void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005393 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005394}
5395
Alexandre Rames67555f72014-11-18 10:55:16 +00005396void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005397 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005398}
5399
5400void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005401 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005402}
5403
5404void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005405 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005406}
5407
Alexandre Rames67555f72014-11-18 10:55:16 +00005408void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005409 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005410}
5411
Calin Juravlee460d1d2015-09-29 04:52:17 +01005412void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5413 HUnresolvedInstanceFieldGet* instruction) {
5414 FieldAccessCallingConventionARM64 calling_convention;
5415 codegen_->CreateUnresolvedFieldLocationSummary(
5416 instruction, instruction->GetFieldType(), calling_convention);
5417}
5418
5419void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5420 HUnresolvedInstanceFieldGet* instruction) {
5421 FieldAccessCallingConventionARM64 calling_convention;
5422 codegen_->GenerateUnresolvedFieldAccess(instruction,
5423 instruction->GetFieldType(),
5424 instruction->GetFieldIndex(),
5425 instruction->GetDexPc(),
5426 calling_convention);
5427}
5428
5429void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5430 HUnresolvedInstanceFieldSet* instruction) {
5431 FieldAccessCallingConventionARM64 calling_convention;
5432 codegen_->CreateUnresolvedFieldLocationSummary(
5433 instruction, instruction->GetFieldType(), calling_convention);
5434}
5435
5436void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5437 HUnresolvedInstanceFieldSet* instruction) {
5438 FieldAccessCallingConventionARM64 calling_convention;
5439 codegen_->GenerateUnresolvedFieldAccess(instruction,
5440 instruction->GetFieldType(),
5441 instruction->GetFieldIndex(),
5442 instruction->GetDexPc(),
5443 calling_convention);
5444}
5445
5446void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5447 HUnresolvedStaticFieldGet* instruction) {
5448 FieldAccessCallingConventionARM64 calling_convention;
5449 codegen_->CreateUnresolvedFieldLocationSummary(
5450 instruction, instruction->GetFieldType(), calling_convention);
5451}
5452
5453void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5454 HUnresolvedStaticFieldGet* instruction) {
5455 FieldAccessCallingConventionARM64 calling_convention;
5456 codegen_->GenerateUnresolvedFieldAccess(instruction,
5457 instruction->GetFieldType(),
5458 instruction->GetFieldIndex(),
5459 instruction->GetDexPc(),
5460 calling_convention);
5461}
5462
5463void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5464 HUnresolvedStaticFieldSet* instruction) {
5465 FieldAccessCallingConventionARM64 calling_convention;
5466 codegen_->CreateUnresolvedFieldLocationSummary(
5467 instruction, instruction->GetFieldType(), calling_convention);
5468}
5469
5470void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5471 HUnresolvedStaticFieldSet* instruction) {
5472 FieldAccessCallingConventionARM64 calling_convention;
5473 codegen_->GenerateUnresolvedFieldAccess(instruction,
5474 instruction->GetFieldType(),
5475 instruction->GetFieldIndex(),
5476 instruction->GetDexPc(),
5477 calling_convention);
5478}
5479
Alexandre Rames5319def2014-10-23 10:03:10 +01005480void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005481 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5482 instruction, LocationSummary::kCallOnSlowPath);
Artem Serov7957d952017-04-04 15:44:09 +01005483 // In the suspend check slow path, usually there are no caller-save registers at all.
 5484 // If SIMD instructions are present, however, we force spilling all live SIMD
 5485 // registers in full width (since the runtime only saves/restores the lower part).
5486 locations->SetCustomSlowPathCallerSaves(
5487 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexandre Rames5319def2014-10-23 10:03:10 +01005488}
5489
5490void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005491 HBasicBlock* block = instruction->GetBlock();
5492 if (block->GetLoopInformation() != nullptr) {
5493 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5494 // The back edge will generate the suspend check.
5495 return;
5496 }
5497 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5498 // The goto will generate the suspend check.
5499 return;
5500 }
5501 GenerateSuspendCheck(instruction, nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005502 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005503}
5504
Alexandre Rames67555f72014-11-18 10:55:16 +00005505void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005506 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5507 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005508 InvokeRuntimeCallingConvention calling_convention;
5509 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5510}
5511
5512void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005513 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005514 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005515}
5516
5517void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
5518 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005519 new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005520 DataType::Type input_type = conversion->GetInputType();
5521 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005522 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
5523 << input_type << " -> " << result_type;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005524 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
5525 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005526 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5527 }
5528
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005529 if (DataType::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005530 locations->SetInAt(0, Location::RequiresFpuRegister());
5531 } else {
5532 locations->SetInAt(0, Location::RequiresRegister());
5533 }
5534
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005535 if (DataType::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005536 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5537 } else {
5538 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5539 }
5540}
5541
5542void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005543 DataType::Type result_type = conversion->GetResultType();
5544 DataType::Type input_type = conversion->GetInputType();
Alexandre Rames67555f72014-11-18 10:55:16 +00005545
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005546 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
5547 << input_type << " -> " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005548
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005549 if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
5550 int result_size = DataType::Size(result_type);
5551 int input_size = DataType::Size(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00005552 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005553 Register output = OutputRegister(conversion);
5554 Register source = InputRegisterAt(conversion, 0);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005555 if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01005556 // 'int' values are used directly as W registers, discarding the top
5557 // bits, so we don't need to sign-extend and can just perform a move.
5558 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
5559 // top 32 bits of the target register. We theoretically could leave those
5560 // bits unchanged, but we would have to make sure that no code uses a
5561 // 32bit input value as a 64bit value assuming that the top 32 bits are
5562 // zero.
5563 __ Mov(output.W(), source.W());
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01005564 } else if (DataType::IsUnsignedType(result_type) ||
5565 (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
5566 __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005567 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00005568 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005569 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005570 } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005571 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005572 } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
5573 CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005574 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005575 } else if (DataType::IsFloatingPointType(result_type) &&
5576 DataType::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005577 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
5578 } else {
5579 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
5580 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005581 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00005582}
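
// Illustrative sketch only: for the integral narrowing cases above, the Sbfx/Ubfx choice
// matches plain C++ casts; signed result types are sign-extended from the low bits and
// unsigned ones (Uint8/Uint16, e.g. Java char) are zero-extended. The helper names are
// made up for this sketch.
static int32_t ModelIntToShort(int32_t value) {
  return static_cast<int16_t>(value);    // Sbfx out, source, #0, #16
}
static int32_t ModelIntToChar(int32_t value) {
  return static_cast<uint16_t>(value);   // Ubfx out, source, #0, #16
}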
Alexandre Rames67555f72014-11-18 10:55:16 +00005583
Serban Constantinescu02164b32014-11-13 14:05:07 +00005584void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
5585 HandleShift(ushr);
5586}
5587
5588void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
5589 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00005590}
5591
5592void LocationsBuilderARM64::VisitXor(HXor* instruction) {
5593 HandleBinaryOp(instruction);
5594}
5595
5596void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
5597 HandleBinaryOp(instruction);
5598}
5599
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005600void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005601 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005602 LOG(FATAL) << "Unreachable";
5603}
5604
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005605void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005606 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005607 LOG(FATAL) << "Unreachable";
5608}
5609
Mark Mendellfe57faa2015-09-18 09:26:15 -04005610// Simple implementation of packed switch - generate cascaded compare/jumps.
5611void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5612 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005613 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005614 locations->SetInAt(0, Location::RequiresRegister());
5615}
5616
5617void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5618 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08005619 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04005620 Register value_reg = InputRegisterAt(switch_instr, 0);
5621 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
5622
Zheng Xu3927c8b2015-11-18 17:46:25 +08005623 // Roughly set 16 as max average assemblies generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005624 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08005625 // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the graph to
5626 // make sure we don't emit it if the target may run out of range.
5627 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
5628 // ranges and emit the tables only as required.
 5629 static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
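  // Worked example of the threshold (A64 instructions are 4 bytes each):
  //   kMaxExpectedSizePerHInstruction = 16 * 4 B = 64 B
  //   kJumpTableInstructionThreshold  = 1 MB / 64 B = 16384
  // so once the graph's current instruction id exceeds ~16K we fall back to the
  // compare/jump cascade below to keep the ADR-based jump table in range.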
Mark Mendellfe57faa2015-09-18 09:26:15 -04005630
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005631 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08005632 // Current instruction id is an upper bound of the number of HIRs in the graph.
5633 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
5634 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005635 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5636 Register temp = temps.AcquireW();
5637 __ Subs(temp, value_reg, Operand(lower_bound));
5638
Zheng Xu3927c8b2015-11-18 17:46:25 +08005639 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005640 // Jump to successors[0] if value == lower_bound.
5641 __ B(eq, codegen_->GetLabelOf(successors[0]));
5642 int32_t last_index = 0;
5643 for (; num_entries - last_index > 2; last_index += 2) {
5644 __ Subs(temp, temp, Operand(2));
5645 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
5646 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
5647 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
5648 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
5649 }
5650 if (num_entries - last_index == 2) {
5651 // The last missing case_value.
5652 __ Cmp(temp, Operand(1));
5653 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08005654 }
5655
5656 // And the default for any other value.
5657 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
5658 __ B(codegen_->GetLabelOf(default_block));
5659 }
5660 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01005661 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08005662
5663 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5664
 5665 // The instructions below should use at most one blocked register. Since there are two blocked
 5666 // registers, we are free to block one of them.
5667 Register temp_w = temps.AcquireW();
5668 Register index;
5669 // Remove the bias.
5670 if (lower_bound != 0) {
5671 index = temp_w;
5672 __ Sub(index, value_reg, Operand(lower_bound));
5673 } else {
5674 index = value_reg;
5675 }
5676
 5677 // Jump to the default block if the index is out of range.
5678 __ Cmp(index, Operand(num_entries));
5679 __ B(hs, codegen_->GetLabelOf(default_block));
5680
 5681 // The current VIXL implementation does not require any blocked registers to encode the
 5682 // immediate value for Adr, so we are free to use both VIXL blocked registers to reduce
 5683 // register pressure.
5684 Register table_base = temps.AcquireX();
5685 // Load jump offset from the table.
5686 __ Adr(table_base, jump_table->GetTableStartLabel());
5687 Register jump_offset = temp_w;
5688 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
5689
5690 // Jump to target block by branching to table_base(pc related) + offset.
5691 Register target_address = table_base;
5692 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
5693 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005694 }
5695}
5696
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005697void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
5698 HInstruction* instruction,
5699 Location out,
5700 uint32_t offset,
5701 Location maybe_temp,
5702 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005703 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00005704 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005705 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005706 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005707 if (kUseBakerReadBarrier) {
5708 // Load with fast path based Baker's read barrier.
5709 // /* HeapReference<Object> */ out = *(out + offset)
5710 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5711 out,
5712 out_reg,
5713 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005714 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00005715 /* needs_null_check */ false,
5716 /* use_load_acquire */ false);
5717 } else {
5718 // Load with slow path based read barrier.
5719 // Save the value of `out` into `maybe_temp` before overwriting it
5720 // in the following move operation, as we will need it for the
5721 // read barrier below.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005722 Register temp_reg = RegisterFrom(maybe_temp, type);
Roland Levillain44015862016-01-22 11:47:17 +00005723 __ Mov(temp_reg, out_reg);
5724 // /* HeapReference<Object> */ out = *(out + offset)
5725 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5726 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5727 }
5728 } else {
5729 // Plain load with no read barrier.
5730 // /* HeapReference<Object> */ out = *(out + offset)
5731 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5732 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5733 }
5734}
5735
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005736void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
5737 HInstruction* instruction,
5738 Location out,
5739 Location obj,
5740 uint32_t offset,
5741 Location maybe_temp,
5742 ReadBarrierOption read_barrier_option) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005743 DataType::Type type = DataType::Type::kReference;
Roland Levillain44015862016-01-22 11:47:17 +00005744 Register out_reg = RegisterFrom(out, type);
5745 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005746 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005747 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005748 if (kUseBakerReadBarrier) {
5749 // Load with fast path based Baker's read barrier.
Roland Levillain44015862016-01-22 11:47:17 +00005750 // /* HeapReference<Object> */ out = *(obj + offset)
5751 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5752 out,
5753 obj_reg,
5754 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005755 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00005756 /* needs_null_check */ false,
5757 /* use_load_acquire */ false);
5758 } else {
5759 // Load with slow path based read barrier.
5760 // /* HeapReference<Object> */ out = *(obj + offset)
5761 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5762 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5763 }
5764 } else {
5765 // Plain load with no read barrier.
5766 // /* HeapReference<Object> */ out = *(obj + offset)
5767 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5768 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5769 }
5770}
5771
Vladimir Markoca1e0382018-04-11 09:58:41 +00005772void CodeGeneratorARM64::GenerateGcRootFieldLoad(
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005773 HInstruction* instruction,
5774 Location root,
5775 Register obj,
5776 uint32_t offset,
5777 vixl::aarch64::Label* fixup_label,
5778 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005779 DCHECK(fixup_label == nullptr || offset == 0u);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005780 Register root_reg = RegisterFrom(root, DataType::Type::kReference);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005781 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005782 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005783 if (kUseBakerReadBarrier) {
5784 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillainba650a42017-03-06 13:52:32 +00005785 // Baker's read barriers are used.
Roland Levillain44015862016-01-22 11:47:17 +00005786
Vladimir Marko008e09f32018-08-06 15:42:43 +01005787 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
5788 // the Marking Register) to decide whether we need to enter
5789 // the slow path to mark the GC root.
5790 //
5791 // We use shared thunks for the slow path; shared within the method
5792 // for JIT, across methods for AOT. That thunk checks the reference
5793 // and jumps to the entrypoint if needed.
5794 //
5795 // lr = &return_address;
5796 // GcRoot<mirror::Object> root = *(obj+offset); // Original reference load.
5797 // if (mr) { // Thread::Current()->GetIsGcMarking()
5798 // goto gc_root_thunk<root_reg>(lr)
5799 // }
5800 // return_address:
Roland Levillainba650a42017-03-06 13:52:32 +00005801
Vladimir Marko008e09f32018-08-06 15:42:43 +01005802 UseScratchRegisterScope temps(GetVIXLAssembler());
5803 DCHECK(temps.IsAvailable(ip0));
5804 DCHECK(temps.IsAvailable(ip1));
5805 temps.Exclude(ip0, ip1);
5806 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());
Roland Levillain44015862016-01-22 11:47:17 +00005807
Vladimir Marko008e09f32018-08-06 15:42:43 +01005808 ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
5809 vixl::aarch64::Label return_address;
5810 __ adr(lr, &return_address);
5811 if (fixup_label != nullptr) {
5812 __ bind(fixup_label);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005813 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01005814 static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
Vladimir Marko94796f82018-08-08 15:15:33 +01005815 "GC root LDR must be 2 instructions (8B) before the return address label.");
Vladimir Marko008e09f32018-08-06 15:42:43 +01005816 __ ldr(root_reg, MemOperand(obj.X(), offset));
5817 EmitBakerReadBarrierCbnz(custom_data);
5818 __ bind(&return_address);
Roland Levillain44015862016-01-22 11:47:17 +00005819 } else {
5820 // GC root loaded through a slow path for read barriers other
5821 // than Baker's.
5822 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005823 if (fixup_label == nullptr) {
5824 __ Add(root_reg.X(), obj.X(), offset);
5825 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00005826 EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005827 }
Roland Levillain44015862016-01-22 11:47:17 +00005828 // /* mirror::Object* */ root = root->Read()
Vladimir Markoca1e0382018-04-11 09:58:41 +00005829 GenerateReadBarrierForRootSlow(instruction, root, root);
Roland Levillain44015862016-01-22 11:47:17 +00005830 }
5831 } else {
5832 // Plain GC root load with no read barrier.
5833 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005834 if (fixup_label == nullptr) {
5835 __ Ldr(root_reg, MemOperand(obj, offset));
5836 } else {
Vladimir Markoca1e0382018-04-11 09:58:41 +00005837 EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005838 }
Roland Levillain44015862016-01-22 11:47:17 +00005839 // Note that GC roots are not affected by heap poisoning, thus we
5840 // do not have to unpoison `root_reg` here.
5841 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00005842 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Roland Levillain44015862016-01-22 11:47:17 +00005843}
5844
Vladimir Marko94796f82018-08-08 15:15:33 +01005845void CodeGeneratorARM64::GenerateUnsafeCasOldValueMovWithBakerReadBarrier(
5846 vixl::aarch64::Register marked,
5847 vixl::aarch64::Register old_value) {
5848 DCHECK(kEmitCompilerReadBarrier);
5849 DCHECK(kUseBakerReadBarrier);
5850
5851 // Similar to the Baker RB path in GenerateGcRootFieldLoad(), with a MOV instead of LDR.
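// The emitted sequence is roughly:
//   adr  lr, return_address
//   mov  marked, old_value            // 8B before return_address, mirroring the GC root LDR.
//   cbnz mr, <GC root thunk for `marked`>
//   return_address: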
5852 uint32_t custom_data = EncodeBakerReadBarrierGcRootData(marked.GetCode());
5853
5854 ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
5855 vixl::aarch64::Label return_address;
5856 __ adr(lr, &return_address);
5857 static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
5858 "GC root LDR must be 2 instructions (8B) before the return address label.");
5859 __ mov(marked, old_value);
5860 EmitBakerReadBarrierCbnz(custom_data);
5861 __ bind(&return_address);
5862}
5863
Roland Levillain44015862016-01-22 11:47:17 +00005864void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5865 Location ref,
Vladimir Marko248141f2018-08-10 10:40:07 +01005866 vixl::aarch64::Register obj,
5867 const vixl::aarch64::MemOperand& src,
Roland Levillain44015862016-01-22 11:47:17 +00005868 bool needs_null_check,
5869 bool use_load_acquire) {
5870 DCHECK(kEmitCompilerReadBarrier);
5871 DCHECK(kUseBakerReadBarrier);
5872
Vladimir Marko0ecac682018-08-07 10:40:38 +01005873 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
5874 // Marking Register) to decide whether we need to enter the slow
5875 // path to mark the reference. Then, in the slow path, check the
5876 // gray bit in the lock word of the reference's holder (`obj`) to
5877 // decide whether to mark `ref` or not.
5878 //
5879 // We use shared thunks for the slow path; shared within the method
5880 // for JIT, across methods for AOT. That thunk checks the holder
5881 // and jumps to the entrypoint if needed. If the holder is not gray,
5882 // it creates a fake dependency and returns to the LDR instruction.
5883 //
5884 // lr = &gray_return_address;
5885 // if (mr) { // Thread::Current()->GetIsGcMarking()
5886 // goto field_thunk<holder_reg, base_reg, use_load_acquire>(lr)
5887 // }
5888 // not_gray_return_address:
5889 // // Original reference load. If the offset is too large to fit
5890 // // into LDR, we use an adjusted base register here.
5891 // HeapReference<mirror::Object> reference = *(obj+offset);
5892 // gray_return_address:
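// Concretely, the fast path below emits roughly (non-acquire case, no heap poisoning):
//   adr  lr, gray_return_address
//   cbnz mr, <field thunk for (base, holder)>
//   ldr  w_ref, [base, #offset]
//   gray_return_address: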
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005893
Vladimir Marko248141f2018-08-10 10:40:07 +01005894 DCHECK(src.GetAddrMode() == vixl::aarch64::Offset);
5895 DCHECK_ALIGNED(src.GetOffset(), sizeof(mirror::HeapReference<mirror::Object>));
5896
5897 UseScratchRegisterScope temps(GetVIXLAssembler());
5898 DCHECK(temps.IsAvailable(ip0));
5899 DCHECK(temps.IsAvailable(ip1));
5900 temps.Exclude(ip0, ip1);
5901 uint32_t custom_data = use_load_acquire
5902 ? EncodeBakerReadBarrierAcquireData(src.GetBaseRegister().GetCode(), obj.GetCode())
5903 : EncodeBakerReadBarrierFieldData(src.GetBaseRegister().GetCode(), obj.GetCode());
5904
5905 {
5906 ExactAssemblyScope guard(GetVIXLAssembler(),
5907 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
5908 vixl::aarch64::Label return_address;
5909 __ adr(lr, &return_address);
5910 EmitBakerReadBarrierCbnz(custom_data);
5911 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
5912 "Field LDR must be 1 instruction (4B) before the return address label; "
5913 " 2 instructions (8B) for heap poisoning.");
5914 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
5915 if (use_load_acquire) {
5916 DCHECK_EQ(src.GetOffset(), 0);
5917 __ ldar(ref_reg, src);
5918 } else {
5919 __ ldr(ref_reg, src);
5920 }
5921 if (needs_null_check) {
5922 MaybeRecordImplicitNullCheck(instruction);
5923 }
5924 // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
5925 // macro instructions disallowed in ExactAssemblyScope.
5926 if (kPoisonHeapReferences) {
5927 __ neg(ref_reg, Operand(ref_reg));
5928 }
5929 __ bind(&return_address);
5930 }
5931 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
5932}
5933
5934void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5935 Location ref,
5936 Register obj,
5937 uint32_t offset,
5938 Location maybe_temp,
5939 bool needs_null_check,
5940 bool use_load_acquire) {
Vladimir Marko0ecac682018-08-07 10:40:38 +01005941 DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
5942 Register base = obj;
5943 if (use_load_acquire) {
5944 DCHECK(maybe_temp.IsRegister());
5945 base = WRegisterFrom(maybe_temp);
5946 __ Add(base, obj, offset);
5947 offset = 0u;
5948 } else if (offset >= kReferenceLoadMinFarOffset) {
5949 DCHECK(maybe_temp.IsRegister());
5950 base = WRegisterFrom(maybe_temp);
5951 static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
5952 __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
5953 offset &= (kReferenceLoadMinFarOffset - 1u);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00005954 }
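// For example (a sketch, assuming kReferenceLoadMinFarOffset is 16KB): a field at offset 0x5008
// is loaded with base = obj + 0x4000 and an LDR offset of 0x1008, which still fits in the LDR
// immediate field that the thunk later decodes to recompute the full address.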
Vladimir Marko248141f2018-08-10 10:40:07 +01005955 MemOperand src(base.X(), offset);
5956 GenerateFieldLoadWithBakerReadBarrier(
5957 instruction, ref, obj, src, needs_null_check, use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005958}
5959
Vladimir Marko008e09f32018-08-06 15:42:43 +01005960void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005961 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005962 uint32_t data_offset,
5963 Location index,
5964 Register temp,
5965 bool needs_null_check) {
5966 DCHECK(kEmitCompilerReadBarrier);
5967 DCHECK(kUseBakerReadBarrier);
5968
Vladimir Marko66d691d2017-04-07 17:53:39 +01005969 static_assert(
5970 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5971 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005972 size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01005973
Vladimir Marko008e09f32018-08-06 15:42:43 +01005974 // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
5975 // Marking Register) to decide whether we need to enter the slow
5976 // path to mark the reference. Then, in the slow path, check the
5977 // gray bit in the lock word of the reference's holder (`obj`) to
5978 // decide whether to mark `ref` or not.
5979 //
5980 // We use shared thunks for the slow path; shared within the method
5981 // for JIT, across methods for AOT. That thunk checks the holder
5982 // and jumps to the entrypoint if needed. If the holder is not gray,
5983 // it creates a fake dependency and returns to the LDR instruction.
5984 //
5985 // lr = &gray_return_address;
5986 // if (mr) { // Thread::Current()->GetIsGcMarking()
5987 // goto array_thunk<base_reg>(lr)
5988 // }
5989 // not_gray_return_address:
5990 // // Original reference load. If the offset is too large to fit
5991 // // into LDR, we use an adjusted base register here.
5992 // HeapReference<mirror::Object> reference = data[index];
5993 // gray_return_address:
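// Concretely, the fast path below emits roughly (no heap poisoning):
//   add  x_temp, x_obj, #data_offset
//   adr  lr, gray_return_address
//   cbnz mr, <array thunk for temp>
//   ldr  w_ref, [x_temp, x_index, lsl #2]
//   gray_return_address: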
Vladimir Marko66d691d2017-04-07 17:53:39 +01005994
Vladimir Marko008e09f32018-08-06 15:42:43 +01005995 DCHECK(index.IsValid());
5996 Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
5997 Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
Vladimir Marko66d691d2017-04-07 17:53:39 +01005998
Vladimir Marko008e09f32018-08-06 15:42:43 +01005999 UseScratchRegisterScope temps(GetVIXLAssembler());
6000 DCHECK(temps.IsAvailable(ip0));
6001 DCHECK(temps.IsAvailable(ip1));
6002 temps.Exclude(ip0, ip1);
6003 uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());
Vladimir Marko66d691d2017-04-07 17:53:39 +01006004
Vladimir Marko008e09f32018-08-06 15:42:43 +01006005 __ Add(temp.X(), obj.X(), Operand(data_offset));
6006 {
6007 ExactAssemblyScope guard(GetVIXLAssembler(),
6008 (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
6009 vixl::aarch64::Label return_address;
6010 __ adr(lr, &return_address);
6011 EmitBakerReadBarrierCbnz(custom_data);
6012 static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6013 "Array LDR must be 1 instruction (4B) before the return address label; "
6014 " 2 instructions (8B) for heap poisoning.");
6015 __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
6016 DCHECK(!needs_null_check); // The thunk cannot handle the null check.
6017 // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
6018 // macro instructions disallowed in ExactAssemblyScope.
6019 if (kPoisonHeapReferences) {
6020 __ neg(ref_reg, Operand(ref_reg));
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006021 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01006022 __ bind(&return_address);
Vladimir Marko66d691d2017-04-07 17:53:39 +01006023 }
Vladimir Marko008e09f32018-08-06 15:42:43 +01006024 MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
Roland Levillain44015862016-01-22 11:47:17 +00006025}
6026
Roland Levillain2b03a1f2017-06-06 16:09:59 +01006027void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
6028 // The following condition is a compile-time one, so it does not have a run-time cost.
6029 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
6030 // The following condition is a run-time one; it is executed after the
6031 // previous compile-time test, to avoid penalizing non-debug builds.
6032 if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
6033 UseScratchRegisterScope temps(GetVIXLAssembler());
6034 Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
6035 GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
6036 }
6037 }
6038}
6039
Roland Levillain44015862016-01-22 11:47:17 +00006040void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
6041 Location out,
6042 Location ref,
6043 Location obj,
6044 uint32_t offset,
6045 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006046 DCHECK(kEmitCompilerReadBarrier);
6047
Roland Levillain44015862016-01-22 11:47:17 +00006048 // Insert a slow path based read barrier *after* the reference load.
6049 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006050 // If heap poisoning is enabled, the unpoisoning of the loaded
6051 // reference will be carried out by the runtime within the slow
6052 // path.
6053 //
6054 // Note that `ref` currently does not get unpoisoned (when heap
6055 // poisoning is enabled), which is alright as the `ref` argument is
6056 // not used by the artReadBarrierSlow entry point.
6057 //
6058 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01006059 SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006060 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
6061 AddSlowPath(slow_path);
6062
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006063 __ B(slow_path->GetEntryLabel());
6064 __ Bind(slow_path->GetExitLabel());
6065}
6066
Roland Levillain44015862016-01-22 11:47:17 +00006067void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6068 Location out,
6069 Location ref,
6070 Location obj,
6071 uint32_t offset,
6072 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006073 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00006074 // Baker's read barriers shall be handled by the fast path
6075 // (CodeGeneratorARM64::Generate{Field,Array}LoadWithBakerReadBarrier).
6076 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006077 // If heap poisoning is enabled, unpoisoning will be taken care of
6078 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00006079 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006080 } else if (kPoisonHeapReferences) {
6081 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
6082 }
6083}
6084
Roland Levillain44015862016-01-22 11:47:17 +00006085void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6086 Location out,
6087 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006088 DCHECK(kEmitCompilerReadBarrier);
6089
Roland Levillain44015862016-01-22 11:47:17 +00006090 // Insert a slow path based read barrier *after* the GC root load.
6091 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006092 // Note that GC roots are not affected by heap poisoning, so we do
6093 // not need to do anything special for this here.
6094 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006095 new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006096 AddSlowPath(slow_path);
6097
Roland Levillain22ccc3a2015-11-24 13:10:05 +00006098 __ B(slow_path->GetEntryLabel());
6099 __ Bind(slow_path->GetExitLabel());
6100}
6101
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006102void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
6103 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006104 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006105 locations->SetInAt(0, Location::RequiresRegister());
6106 locations->SetOut(Location::RequiresRegister());
6107}
6108
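// A vtable get loads the ArtMethod* directly from the class' embedded vtable at `index`;
// an IMT get first loads the class' ImTable pointer and then the entry at `index`.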
6109void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
6110 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00006111 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006112 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006113 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006114 __ Ldr(XRegisterFrom(locations->Out()),
6115 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006116 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006117 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00006118 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006119 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
6120 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01006121 __ Ldr(XRegisterFrom(locations->Out()),
6122 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006123 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006124}
6125
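// Patch one 32-bit root literal in JIT-compiled code: the literal at `literal->GetOffset()` is
// overwritten with the address of entry `index_in_table` in the JIT roots table at `roots_data`.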
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006126static void PatchJitRootUse(uint8_t* code,
6127 const uint8_t* roots_data,
6128 vixl::aarch64::Literal<uint32_t>* literal,
6129 uint64_t index_in_table) {
6130 uint32_t literal_offset = literal->GetOffset();
6131 uintptr_t address =
6132 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
6133 uint8_t* data = code + literal_offset;
6134 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
6135}
6136
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006137void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
6138 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006139 const StringReference& string_reference = entry.first;
6140 vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01006141 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006142 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006143 }
6144 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006145 const TypeReference& type_reference = entry.first;
6146 vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01006147 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01006148 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006149 }
6150}
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00006151
Alexandre Rames67555f72014-11-18 10:55:16 +00006152#undef __
6153#undef QUICK_ENTRY_POINT
6154
Vladimir Markoca1e0382018-04-11 09:58:41 +00006155#define __ assembler.GetVIXLAssembler()->
6156
6157static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
6158 vixl::aarch64::Register base_reg,
6159 vixl::aarch64::MemOperand& lock_word,
Vladimir Marko7a695052018-04-12 10:26:50 +01006160 vixl::aarch64::Label* slow_path,
6161 vixl::aarch64::Label* throw_npe = nullptr) {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006162 // Load the lock word containing the rb_state.
6163 __ Ldr(ip0.W(), lock_word);
6164 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Roland Levillain14e5a292018-06-28 12:00:56 +01006165 static_assert(ReadBarrier::NonGrayState() == 0, "Expecting non-gray to have value 0");
Vladimir Markoca1e0382018-04-11 09:58:41 +00006166 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
6167 __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
6168 static_assert(
6169 BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
6170 "Field and array LDR offsets must be the same to reuse the same code.");
Vladimir Marko7a695052018-04-12 10:26:50 +01006171 // To throw the NPE, we return to the fast path: the LDR it returns to then faults on the
 // null holder, so the artificial dependency added below does not matter in that case.
6172 if (throw_npe != nullptr) {
6173 __ Bind(throw_npe);
6174 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006175 // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
6176 static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
6177 "Field LDR must be 1 instruction (4B) before the return address label; "
6178 " 2 instructions (8B) for heap poisoning.");
6179 __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
6180 // Introduce a dependency on the lock_word including rb_state,
6181 // to prevent load-load reordering, and without using
6182 // a memory barrier (which would be more expensive).
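 // Note: the 32-bit lock word load zero-extends ip0, so `ip0 LSR #32` is always 0; the ADD thus
 // leaves base_reg unchanged while making the subsequent reference load address-dependent on the
 // lock word load.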
6183 __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
6184 __ Br(lr); // And return back to the function.
6185 // Note: The fake dependency is unnecessary for the slow path.
6186}
6187
6188// Load the read barrier introspection entrypoint in register `entrypoint`.
6189static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
6190 vixl::aarch64::Register entrypoint) {
6191 // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
6192 DCHECK_EQ(ip0.GetCode(), 16u);
6193 const int32_t entry_point_offset =
6194 Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
6195 __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
6196}
6197
6198void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
6199 uint32_t encoded_data,
6200 /*out*/ std::string* debug_name) {
6201 BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
6202 switch (kind) {
Vladimir Marko0ecac682018-08-07 10:40:38 +01006203 case BakerReadBarrierKind::kField:
6204 case BakerReadBarrierKind::kAcquire: {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006205 auto base_reg =
6206 Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6207 CheckValidReg(base_reg.GetCode());
6208 auto holder_reg =
6209 Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
6210 CheckValidReg(holder_reg.GetCode());
6211 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6212 temps.Exclude(ip0, ip1);
Vladimir Marko7a695052018-04-12 10:26:50 +01006213 // If base_reg differs from holder_reg, the offset was too large and we must have emitted
6214 // an explicit null check before the load. Otherwise, for implicit null checks, we need to
6215 // null-check the holder as we do not necessarily do that check before going to the thunk.
6216 vixl::aarch64::Label throw_npe_label;
6217 vixl::aarch64::Label* throw_npe = nullptr;
6218 if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
6219 throw_npe = &throw_npe_label;
6220 __ Cbz(holder_reg.W(), throw_npe);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006221 }
Vladimir Marko7a695052018-04-12 10:26:50 +01006222 // Check if the holder is gray and, if not, add fake dependency to the base register
6223 // and return to the LDR instruction to load the reference. Otherwise, use introspection
6224 // to load the reference and call the entrypoint that performs further checks on the
6225 // reference and marks it if needed.
Vladimir Markoca1e0382018-04-11 09:58:41 +00006226 vixl::aarch64::Label slow_path;
6227 MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
Vladimir Marko7a695052018-04-12 10:26:50 +01006228 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
Vladimir Markoca1e0382018-04-11 09:58:41 +00006229 __ Bind(&slow_path);
Vladimir Marko0ecac682018-08-07 10:40:38 +01006230 if (kind == BakerReadBarrierKind::kField) {
6231 MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
6232 __ Ldr(ip0.W(), ldr_address); // Load the LDR (immediate) instruction to extract its offset field.
6233 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6234 __ Ubfx(ip0.W(), ip0.W(), 10, 12); // Extract the imm12 offset field; it is re-scaled by 4 below.
6235 __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2)); // Load the reference.
6236 } else {
6237 DCHECK(kind == BakerReadBarrierKind::kAcquire);
6238 DCHECK(!base_reg.Is(holder_reg));
6239 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6240 __ Ldar(ip0.W(), MemOperand(base_reg));
6241 }
Vladimir Markoca1e0382018-04-11 09:58:41 +00006242 // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
6243 __ Br(ip1); // Jump to the entrypoint.
Vladimir Markoca1e0382018-04-11 09:58:41 +00006244 break;
6245 }
6246 case BakerReadBarrierKind::kArray: {
6247 auto base_reg =
6248 Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6249 CheckValidReg(base_reg.GetCode());
6250 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6251 BakerReadBarrierSecondRegField::Decode(encoded_data));
6252 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6253 temps.Exclude(ip0, ip1);
6254 vixl::aarch64::Label slow_path;
6255 int32_t data_offset =
6256 mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
6257 MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
6258 DCHECK_LT(lock_word.GetOffset(), 0);
6259 EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
6260 __ Bind(&slow_path);
6261 MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
6262 __ Ldr(ip0.W(), ldr_address); // Load the LDR (register) instruction to extract its index register.
6263 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6264 __ Ubfx(ip0, ip0, 16, 6); // Extract the index register, plus 32 (bit 21 is set).
6265 __ Bfi(ip1, ip0, 3, 6); // Insert ip0 into the entrypoint address to create
6266 // a switch-case target based on the index register.
6267 __ Mov(ip0, base_reg); // Move the base register to ip0.
6268 __ Br(ip1); // Jump to the entrypoint's array switch case.
6269 break;
6270 }
6271 case BakerReadBarrierKind::kGcRoot: {
6272 // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
6273 // and it does not have a forwarding address), call the correct introspection entrypoint;
6274 // otherwise return the reference (or the extracted forwarding address).
6275 // There is no gray bit check for GC roots.
6276 auto root_reg =
6277 Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
6278 CheckValidReg(root_reg.GetCode());
6279 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6280 BakerReadBarrierSecondRegField::Decode(encoded_data));
6281 UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
6282 temps.Exclude(ip0, ip1);
6283 vixl::aarch64::Label return_label, not_marked, forwarding_address;
6284 __ Cbz(root_reg, &return_label);
6285 MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
6286 __ Ldr(ip0.W(), lock_word);
6287 __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
6288 __ Bind(&return_label);
6289 __ Br(lr);
6290 __ Bind(&not_marked);
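 // A forwarding address lock word has both (top) state bits set; TST'ing the lock word against
 // itself shifted left by one sets the N flag iff bits 31 and 30 are both set, so the `mi`
 // branch below detects the forwarding address case.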
6291 __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
6292 __ B(&forwarding_address, mi);
6293 LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
6294 // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
6295 // art_quick_read_barrier_mark_introspection_gc_roots.
6296 __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
6297 __ Mov(ip0.W(), root_reg);
6298 __ Br(ip1);
6299 __ Bind(&forwarding_address);
6300 __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
6301 __ Br(lr);
6302 break;
6303 }
6304 default:
6305 LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
6306 UNREACHABLE();
6307 }
6308
Vladimir Marko966b46f2018-08-03 10:20:19 +00006309 // For JIT, the slow path is considered part of the compiled method,
6310 // so JIT should pass null as `debug_name`. Tests may not have a runtime.
6311 DCHECK(Runtime::Current() == nullptr ||
6312 !Runtime::Current()->UseJitCompilation() ||
6313 debug_name == nullptr);
6314 if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
Vladimir Markoca1e0382018-04-11 09:58:41 +00006315 std::ostringstream oss;
6316 oss << "BakerReadBarrierThunk";
6317 switch (kind) {
6318 case BakerReadBarrierKind::kField:
6319 oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
6320 << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
6321 break;
Vladimir Marko0ecac682018-08-07 10:40:38 +01006322 case BakerReadBarrierKind::kAcquire:
6323 oss << "Acquire_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
6324 << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
6325 break;
Vladimir Markoca1e0382018-04-11 09:58:41 +00006326 case BakerReadBarrierKind::kArray:
6327 oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
6328 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6329 BakerReadBarrierSecondRegField::Decode(encoded_data));
6330 break;
6331 case BakerReadBarrierKind::kGcRoot:
6332 oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
6333 DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
6334 BakerReadBarrierSecondRegField::Decode(encoded_data));
6335 break;
6336 }
6337 *debug_name = oss.str();
6338 }
6339}
6340
6341#undef __
6342
Alexandre Rames5319def2014-10-23 10:03:10 +01006343} // namespace arm64
6344} // namespace art