/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/asm_support_arm64.h"
#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "base/bit_utils_iterator.h"
#include "class_table.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "gc/space/image_space.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64FromLocation;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::QRegisterFrom;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;
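// At the threshold itself the two options roughly tie: 1.5 * 7 + 3 ~= 14 instructions for the
// compare/jump sequence versus 7 instructions plus 7 * 4 bytes of literals for the table, so
// larger switches favor the jump table.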

// A reference load (except object array loads) uses LDR Wt, [Xn, #offset], which can encode
// offsets < 16KiB. For offsets >= 16KiB, the load must be emitted as two or more instructions.
// For the Baker read barrier implementation using link-time generated thunks we need to split
// the offset explicitly.
constexpr uint32_t kReferenceLoadMinFarOffset = 16 * KB;
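// A far offset is then split in the spirit of the following sketch (the exact split is done by
// the reference load helpers):
//
//   Add(temp, base, offset & ~(kReferenceLoadMinFarOffset - 1));  // Out-of-range part.
//   Ldr(dest, MemOperand(temp, offset & (kReferenceLoadMinFarOffset - 1)));  // In-range part.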

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches, see the
  // "Meaning (floating-point)" column in the table C1-1 in the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
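
// For example, a "less than" comparison with gt_bias (dex `cmpg` semantics, where a NaN operand
// makes the compare yield "greater") maps to `cc`: FCMP sets the C flag for unordered operands,
// so `cc` evaluates to false for NaN and the "less than" branch is correctly not taken.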

Location ARM64ReturnLocation(DataType::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == DataType::Type::kFloat32) {
    return LocationFrom(s0);
  } else if (return_type == DataType::Type::kFloat64) {
    return LocationFrom(d0);
  } else if (return_type == DataType::Type::kInt64) {
    return LocationFrom(x0);
  } else if (return_type == DataType::Type::kVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
  DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
            RegisterFrom(calling_convention.GetReturnLocation(DataType::Type::kReference),
                         DataType::Type::kReference).GetCode());
  return caller_saves;
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()
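// With the macro above, e.g. `__ Bind(GetEntryLabel());` in the slow paths below expands to
// `down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->Bind(GetEntryLabel());`.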

// Calculate the memory operands for saving/restoring live registers.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  unsigned v_reg_size = codegen->GetGraph()->HasSIMD() ? kQRegSize : kDRegSize;
  CPURegList fp_list = CPURegList(CPURegister::kVRegister, v_reg_size, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}
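
// Roughly, the spill area laid out by the helper above, relative to the chosen base, is:
//
//   base + spill_offset                    : core (X) register spills, 8 bytes each.
//   base + spill_offset + core_spill_size  : FP register spills (D regs, or Q regs with SIMD).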

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};
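
// All slow paths in this file follow the same protocol: the fast path emits a conditional
// branch to GetEntryLabel(); EmitNativeCode() binds that label, moves the arguments into the
// runtime calling convention, calls a quick entry point and, for non-fatal paths, restores the
// live registers and branches back to GetExitLabel(). Fatal paths (IsFatal() returning true,
// as above) never return, so they skip the restore and the final branch.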

class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeARM64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), arm64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
      arm64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      arm64_codegen->MoveLocation(LocationFrom(calling_convention.GetRegisterAt(0)),
                                  source,
                                  cls_->GetType());
    }
    if (must_do_clinit) {
      arm64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit LoadStringSlowPathARM64(HLoadString* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    DataType::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live 128-bit regs for SIMD.
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live 128-bit regs for SIMD.
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ Mov(calling_convention.GetRegisterAt(0),
           static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // underlying code buffer and we have generated the jump table with the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}
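
// The table placed above holds one int32_t offset per switch entry, each relative to
// table_start_. A sketch of the dispatch sequence that consumes it (emitted when visiting the
// HPackedSwitch itself) is roughly:
//
//   Adr(table_base, &table_start_);                            // Address of the table.
//   Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));  // Load the entry for `index`.
//   Add(target, table_base, Operand(jump_offset, SXTW));       // Rebase the offset.
//   Br(target);                                                // Jump to the case block.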

// Abstract base class for read barrier slow paths marking a reference
// `ref`.
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class ReadBarrierMarkSlowPathBaseARM64 : public SlowPathCodeARM64 {
 protected:
  ReadBarrierMarkSlowPathBaseARM64(HInstruction* instruction, Location ref, Location entrypoint)
      : SlowPathCodeARM64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathBaseARM64"; }

  // Generate assembly code calling the read barrier marking runtime
  // entry point (ReadBarrierMarkRegX).
  void GenerateReadBarrierMarkRuntimeCall(CodeGenerator* codegen) {
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary, so it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
  }

  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathBaseARM64);
};
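
// Note that the entry point is selected by register code:
// Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg()) indexes a per-thread
// table of ReadBarrierMarkRegX entry points, each of which reads and writes its dedicated
// register, so no argument shuffling is needed at the call site.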

// Slow path loading `obj`'s lock word, loading a reference from
// object `*(obj + offset + (index << scale_factor))` into `ref`, and
// marking `ref` if `obj` is gray according to the lock word (Baker
// read barrier). If needed, this slow path also atomically updates
// the field `obj.field` in the object `obj` holding this reference
// after marking.
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
//
// Argument `entrypoint` must be a register location holding the read
// barrier marking runtime entry point to be invoked or an empty
// location; in the latter case, the read barrier marking runtime
// entry point will be loaded by the slow path code itself.
class LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64
    : public ReadBarrierMarkSlowPathBaseARM64 {
 public:
  LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
      HInstruction* instruction,
      Location ref,
      Register obj,
      uint32_t offset,
      Location index,
      size_t scale_factor,
      bool needs_null_check,
      bool use_load_acquire,
      Register temp,
      Location entrypoint = Location::NoLocation())
      : ReadBarrierMarkSlowPathBaseARM64(instruction, ref, entrypoint),
        obj_(obj),
        offset_(offset),
        index_(index),
        scale_factor_(scale_factor),
        needs_null_check_(needs_null_check),
        use_load_acquire_(use_load_acquire),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(obj_.IsW());
    DCHECK_NE(ref_.reg(), LocationFrom(temp_).reg());

    // This slow path is only used by the UnsafeCASObject intrinsic at the moment.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK_EQ(offset_, 0u);
    DCHECK_EQ(scale_factor_, 0u);
    DCHECK_EQ(use_load_acquire_, false);
    // The location of the offset of the marked reference field within `obj_`.
    Location field_offset = index_;
    DCHECK(field_offset.IsRegister()) << field_offset;

    // Temporary register `temp_`, used to store the lock word, must
    // not be IP0 nor IP1, as we may use them to emit the reference
    // load (in the call to GenerateRawReferenceLoad below), and we
    // need the lock word to still be in `temp_` after the reference
    // load.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    DCHECK_NE(LocationFrom(temp_).reg(), IP1);

    __ Bind(GetEntryLabel());

    // The implementation is:
    //
    //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   HeapReference<mirror::Object> ref = *src;  // Original reference load.
    //   bool is_gray = (rb_state == ReadBarrier::GrayState());
    //   if (is_gray) {
    //     old_ref = ref;
    //     ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
    //     compareAndSwapObject(obj, field_offset, old_ref, ref);
    //   }

    // /* int32_t */ monitor = obj->monitor_
    uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
    __ Ldr(temp_, HeapOperand(obj_, monitor_offset));
    if (needs_null_check_) {
      codegen->MaybeRecordImplicitNullCheck(instruction_);
    }
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including rb_state,
    // to prevent load-load reordering, and without using
    // a memory barrier (which would be more expensive).
    // `obj` is unchanged by this operation, but its value now depends
    // on `temp`.
    __ Add(obj_.X(), obj_.X(), Operand(temp_.X(), LSR, 32));

    // The actual reference load.
    // A possible implicit null check has already been handled above.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->GenerateRawReferenceLoad(instruction_,
                                            ref_,
                                            obj_,
                                            offset_,
                                            index_,
                                            scale_factor_,
                                            /* needs_null_check */ false,
                                            use_load_acquire_);

    // Mark the object `ref` when `obj` is gray.
    //
    //   if (rb_state == ReadBarrier::GrayState())
    //     ref = ReadBarrier::Mark(ref);
    //
    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    __ Tbz(temp_, LockWord::kReadBarrierStateShift, GetExitLabel());

    // Save the old value of the reference before marking it.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    GenerateReadBarrierMarkRuntimeCall(codegen);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, GetExitLabel());

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);

    // Convenience aliases.
    Register base = obj_.W();
    Register offset = XRegisterFrom(field_offset);
    Register expected = temp_.W();
    Register value = ref_reg;
    Register tmp_ptr = temps.AcquireX();    // Pointer to actual memory.
    Register tmp_value = temps.AcquireW();  // Value in memory.

    __ Add(tmp_ptr, base.X(), Operand(offset));

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not poison `value`, as it is the same register as
        // `expected`, which has just been poisoned.
      } else {
        arm64_codegen->GetAssembler()->PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp_value = [tmp_ptr] - expected;
    // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));

    vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&comparison_failed, ne);
    __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_value, &loop_head);
    __ B(&exit_loop);
    __ Bind(&comparison_failed);
    __ Clrex();
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not unpoison `value`, as it is the same register as
        // `expected`, which has just been unpoisoned.
      } else {
        arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
      }
    }

    __ B(GetExitLabel());
  }

 private:
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The offset, index and scale factor to access the reference in `obj_`.
  uint32_t offset_;
  Location index_;
  size_t scale_factor_;
  // Is a null check required?
  bool needs_null_check_;
  // Should this reference load use Load-Acquire semantics?
  bool use_load_acquire_;
  // A temporary register used to hold the lock word of `obj_`; and
  // also to hold the original reference value, when the reference is
  // marked.
  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64);
};
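
// A note on the CAS loop above: an LDXR/STXR pair succeeds only while the exclusive monitor is
// held, so the `comparison_failed` path issues CLREX to clear the monitor before exiting, and a
// failed STXR (non-zero status in `tmp_value`) simply retries from `loop_head`. No barriers are
// emitted, matching the relaxed memory ordering described before the loop.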
893
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000894// Slow path generating a read barrier for a heap reference.
895class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
896 public:
897 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
898 Location out,
899 Location ref,
900 Location obj,
901 uint32_t offset,
902 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000903 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000904 out_(out),
905 ref_(ref),
906 obj_(obj),
907 offset_(offset),
908 index_(index) {
909 DCHECK(kEmitCompilerReadBarrier);
910 // If `obj` is equal to `out` or `ref`, it means the initial object
911 // has been overwritten by (or after) the heap object reference load
912 // to be instrumented, e.g.:
913 //
914 // __ Ldr(out, HeapOperand(out, class_offset);
Roland Levillain44015862016-01-22 11:47:17 +0000915 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000916 //
917 // In that case, we have lost the information about the original
918 // object, and the emitted read barrier cannot work properly.
919 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
920 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
921 }
922
923 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
924 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
925 LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path), we
    // introduce a copy of it in `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = RegisterFrom(index_, DataType::Type::kInt32);
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
        if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
          // We are about to change the value of `index_reg` (see the
          // calls to vixl::MacroAssembler::Lsl and
          // vixl::MacroAssembler::Mov below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Mov(free_reg.W(), index_reg);
          index_reg = free_reg;
          index = LocationFrom(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Lsl(index_reg, index_reg, DataType::SizeShift(type));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Add(index_reg, index_reg, Operand(offset_));
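        // A worked example (a sketch, not emitted code): for a reference at
        // index 5 with `offset_` equal to a 12-byte array data offset, this
        // computes index = (5 << 2) + 12 = 32, the byte offset of the
        // element within the array object.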
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0u);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          LocationFrom(calling_convention.GetRegisterAt(0)),
                          type,
                          nullptr);
    parallel_move.AddMove(obj_,
                          LocationFrom(calling_convention.GetRegisterAt(1)),
                          type,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            LocationFrom(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
    }
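    // A sketch of why the resolver is needed here: if, say, `ref_` already
    // occupied the register assigned to argument 1 and `obj_` the register
    // assigned to argument 0, emitting the two moves naively in either order
    // would clobber one of the values; the resolver detects such swaps and
    // routes one value through a scratch register.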
    arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
    size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return Register(VIXLRegCodeFromART(i), kXRegSize);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on ARM64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeARM64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    // The argument of the ReadBarrierForRootSlow is not a managed
    // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
    // thus we need a 64-bit move here, and we cannot use
    //
    //   arm64_codegen->MoveLocation(
    //       LocationFrom(calling_convention.GetRegisterAt(0)),
    //       root_,
    //       type);
    //
    // which would emit a 32-bit move, as `type` is a (32-bit wide)
    // reference type (`DataType::Type::kReference`).
    __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
    arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
  return next_location;
}
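
// For illustration (a sketch of the logic above, not authoritative): a
// signature (int, long, float) consumes the first two GP argument registers
// (as 32-bit and 64-bit views) and the first FP argument register, with
// `stack_index_` advancing by 1, 2 and 1 slots respectively; once a register
// pool is exhausted, further arguments of that kind fall through to the
// stack slots returned by GetStackOffsetOf(stack_index_).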

Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
  return LocationFrom(kArtMethodRegister);
}

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
                                       const CompilerOptions& compiler_options,
                                       OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFPRegisters,
                    kNumberOfAllocatableRegisterPairs,
                    callee_saved_core_registers.GetList(),
                    callee_saved_fp_registers.GetList(),
                    compiler_options,
                    stats),
      block_labels_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator()),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_baker_read_barrier_slow_paths_(std::less<uint32_t>(),
                                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save the link register (containing the return address) to mimic Quick.
  AddAllocatedRegister(LocationFrom(lr));
}

#define __ GetVIXLAssembler()->

void CodeGeneratorARM64::EmitJumpTables() {
  for (auto&& jump_table : jump_tables_) {
    jump_table->EmitTable(this);
  }
}

void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
  EmitJumpTables();

  // Emit JIT baker read barrier slow paths.
  DCHECK(Runtime::Current()->UseJitCompilation() || jit_baker_read_barrier_slow_paths_.empty());
  for (auto& entry : jit_baker_read_barrier_slow_paths_) {
    uint32_t encoded_data = entry.first;
    vixl::aarch64::Label* slow_path_entry = &entry.second.label;
    __ Bind(slow_path_entry);
    CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name */ nullptr);
  }

  // Ensure we emit the literal pool.
  __ FinalizeCode();

  CodeGenerator::Finalize(allocator);

  // Verify Baker read barrier linker patches.
  if (kIsDebugBuild) {
    ArrayRef<const uint8_t> code = allocator->GetMemory();
    for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
      DCHECK(info.label.IsBound());
      uint32_t literal_offset = info.label.GetLocation();
      DCHECK_ALIGNED(literal_offset, 4u);

      auto GetInsn = [&code](uint32_t offset) {
        DCHECK_ALIGNED(offset, 4u);
        return
            (static_cast<uint32_t>(code[offset + 0]) << 0) +
            (static_cast<uint32_t>(code[offset + 1]) << 8) +
            (static_cast<uint32_t>(code[offset + 2]) << 16) +
            (static_cast<uint32_t>(code[offset + 3]) << 24);
      };
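
      // Note: A64 instructions are 32-bit words stored little-endian, hence
      // the byte-by-byte reassembly above. For example, the byte sequence
      // 00 00 40 b9 decodes to 0xb9400000, i.e. `ldr w0, [x0]`, the pattern
      // the kField check below matches once the Rt and imm12 fields are
      // masked out.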

      const uint32_t encoded_data = info.custom_data;
      BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
      // Check that the next instruction matches the expected LDR.
      switch (kind) {
        case BakerReadBarrierKind::kField:
        case BakerReadBarrierKind::kAcquire: {
          DCHECK_GE(code.size() - literal_offset, 8u);
          uint32_t next_insn = GetInsn(literal_offset + 4u);
          CheckValidReg(next_insn & 0x1fu);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          if (kind == BakerReadBarrierKind::kField) {
            // LDR (immediate) with correct base_reg.
            CHECK_EQ(next_insn & 0xffc003e0u, 0xb9400000u | (base_reg << 5));
          } else {
            DCHECK(kind == BakerReadBarrierKind::kAcquire);
            // LDAR with correct base_reg.
            CHECK_EQ(next_insn & 0xffffffe0u, 0x88dffc00u | (base_reg << 5));
          }
          break;
        }
        case BakerReadBarrierKind::kArray: {
          DCHECK_GE(code.size() - literal_offset, 8u);
          uint32_t next_insn = GetInsn(literal_offset + 4u);
          // LDR (register) with the correct base_reg, size=10 (32-bit), option=011 (extend = LSL),
          // and S=1 (shift amount = 2 for 32-bit version), i.e. LDR Wt, [Xn, Xm, LSL #2].
          CheckValidReg(next_insn & 0x1fu);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(next_insn & 0xffe0ffe0u, 0xb8607800u | (base_reg << 5));
          CheckValidReg((next_insn >> 16) & 0x1f);  // Check index register.
          break;
        }
        case BakerReadBarrierKind::kGcRoot: {
          DCHECK_GE(literal_offset, 4u);
          uint32_t prev_insn = GetInsn(literal_offset - 4u);
          // LDR (immediate) with correct root_reg.
          const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(prev_insn & 0xffc0001fu, 0xb9400000u | root_reg);
          break;
        }
        default:
          LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
          UNREACHABLE();
      }
    }
  }
}

void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
  // Note: There are 6 kinds of moves:
  // 1. constant -> GPR/FPR (non-cycle)
  // 2. constant -> stack (non-cycle)
  // 3. GPR/FPR -> GPR/FPR
  // 4. GPR/FPR -> stack
  // 5. stack -> GPR/FPR
  // 6. stack -> stack (non-cycle)
  // Cases 1, 2 and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4
  // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR and 1 FPR temps, and there should be no
  // intersecting cycles on ARM64, so 1 GPR and 1 FPR VIXL temps are always available to resolve
  // the dependency.
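  // For instance (a sketch): a swap such as (w0 -> w1, w1 -> w0) is a case 3
  // cycle and is broken by routing one value through a VIXL temp; since at
  // most one GPR and one FPR temp are ever needed per cycle, the temps
  // opened here suffice.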
  vixl_temps_.Open(GetVIXLAssembler());
}

void ParallelMoveResolverARM64::FinishEmitNativeCode() {
  vixl_temps_.Close();
}

Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
  DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister
         || kind == Location::kStackSlot || kind == Location::kDoubleStackSlot
         || kind == Location::kSIMDStackSlot);
  kind = (kind == Location::kFpuRegister || kind == Location::kSIMDStackSlot)
      ? Location::kFpuRegister
      : Location::kRegister;
  Location scratch = GetScratchLocation(kind);
  if (!scratch.Equals(Location::NoLocation())) {
    return scratch;
  }
  // Allocate from VIXL temp registers.
  if (kind == Location::kRegister) {
    scratch = LocationFrom(vixl_temps_.AcquireX());
  } else {
    DCHECK_EQ(kind, Location::kFpuRegister);
    scratch = LocationFrom(codegen_->GetGraph()->HasSIMD()
        ? vixl_temps_.AcquireVRegisterOfSize(kQRegSize)
        : vixl_temps_.AcquireD());
  }
  AddScratchLocation(scratch);
  return scratch;
}

void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
  if (loc.IsRegister()) {
    vixl_temps_.Release(XRegisterFrom(loc));
  } else {
    DCHECK(loc.IsFpuRegister());
    vixl_temps_.Release(codegen_->GetGraph()->HasSIMD() ? QRegisterFrom(loc) : DRegisterFrom(loc));
  }
  RemoveScratchLocation(loc);
}

void ParallelMoveResolverARM64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), DataType::Type::kVoid);
}

void CodeGeneratorARM64::GenerateFrameEntry() {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Bind(&frame_entry_label_);

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    __ Ldrh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
    __ Add(temp, temp, 1);
    __ Strh(temp, MemOperand(kArtMethodRegister, ArtMethod::HotnessCountOffset().Int32Value()));
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm64) || !IsLeafMethod();
  if (do_overflow_check) {
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireX();
    DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
    __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kArm64)));
    {
      // Ensure that between load and RecordPcInfo there are no pools emitted.
      ExactAssemblyScope eas(GetVIXLAssembler(),
                             kInstructionSize,
                             CodeBufferCheckScope::kExactSize);
      __ ldr(wzr, MemOperand(temp, 0));
      RecordPcInfo(nullptr, 0);
    }
  }
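
  // Note on the probe above (descriptive only): loading from
  // `sp - reserved_bytes` touches the stack guard region if the frame about
  // to be created could overflow, and the pc recorded next to the load lets
  // the runtime turn the resulting fault into a StackOverflowError.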

  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    // Stack layout:
    //   sp[frame_size - 8]  : lr.
    //   ...                 : other preserved core registers.
    //   ...                 : other preserved fp registers.
    //   ...                 : reserved frame space.
    //   sp[0]               : current method.

    // Save the current method if we need it. Note that we do not
    // do this in HCurrentMethod, as the instruction might have been removed
    // in the SSA graph.
    if (RequiresCurrentMethod()) {
      __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
    } else {
      __ Claim(frame_size);
    }
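    // Note the pre-indexed addressing above: a single
    // `str x0, [sp, #-frame_size]!` both allocates the whole frame and
    // stores the current ArtMethod* at sp[0].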
    GetAssembler()->cfi().AdjustCFAOffset(frame_size);
    GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
                                   frame_size - GetCoreSpillSize());
    GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
                                   frame_size - FrameEntrySpillSize());

    if (GetGraph()->HasShouldDeoptimizeFlag()) {
      // Initialize should_deoptimize flag to 0.
      Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
      __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
    }
  }

  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void CodeGeneratorARM64::GenerateFrameExit() {
  GetAssembler()->cfi().RememberState();
  if (!HasEmptyFrame()) {
    int frame_size = GetFrameSize();
    GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
                                     frame_size - FrameEntrySpillSize());
    GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
                                     frame_size - GetCoreSpillSize());
    __ Drop(frame_size);
    GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
  }
  __ Ret();
  GetAssembler()->cfi().RestoreState();
  GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
}

CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
  return CPURegList(CPURegister::kRegister, kXRegSize,
                    core_spill_mask_);
}

CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
  DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
                                         GetNumberOfFloatingPointRegisters()));
  return CPURegList(CPURegister::kFPRegister, kDRegSize,
                    fpu_spill_mask_);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ Mov(RegisterFrom(location, DataType::Type::kInt32), value);
}

void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register card = temps.AcquireX();
  Register temp = temps.AcquireW();  // Index within the CardTable - 32bit.
  vixl::aarch64::Label done;
  if (value_can_be_null) {
    __ Cbz(value, &done);
  }
  // Load the address of the card table into `card`.
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
  // Calculate the offset (in the card table) of the card corresponding to
  // `object`.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Write the `art::gc::accounting::CardTable::kCardDirty` value into the
  // `object`'s card.
  //
  // Register `card` contains the address of the card table. Note that the card
  // table's base is biased during its creation so that it always starts at an
  // address whose least-significant byte is equal to `kCardDirty` (see
  // art::gc::accounting::CardTable::Create). Therefore the STRB instruction
  // below writes the `kCardDirty` (byte) value into the `object`'s card
  // (located at `card + object >> kCardShift`).
  //
  // This dual use of the value in register `card` (1. to calculate the location
  // of the card to mark; and 2. to load the `kCardDirty` value) saves a load
  // (no need to explicitly load `kCardDirty` as an immediate value).
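  //
  // As a concrete sketch (assuming kCardDirty == 0x70): the biased table base
  // in `card` ends in the byte 0x70, so the STRB below stores 0x70 to
  // `card + (object >> kCardShift)`, marking the card in a single instruction.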
  __ Strb(card, MemOperand(card, temp.X()));
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Blocked core registers:
  //   lr  : Runtime reserved.
  //   tr  : Runtime reserved.
  //   mr  : Runtime reserved.
  //   ip1 : VIXL core temp.
  //   ip0 : VIXL core temp.
  //
  // Blocked fp registers:
  //   d31 : VIXL fp temp.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
  }

  CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
  while (!reserved_fp_registers.IsEmpty()) {
    blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
  }

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
    while (!reserved_fp_registers_debuggable.IsEmpty()) {
      blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
    }
  }
}

size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Str(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  FPRegister reg = FPRegister(reg_id, kDRegSize);
  __ Ldr(reg, MemOperand(sp, stack_index));
  return kArm64WordSize;
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << XRegister(reg);
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << DRegister(reg);
}

const Arm64InstructionSetFeatures& CodeGeneratorARM64::GetInstructionSetFeatures() const {
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsArm64InstructionSetFeatures();
}

void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
  if (constant->IsIntConstant()) {
    __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
  } else if (constant->IsLongConstant()) {
    __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
  } else if (constant->IsNullConstant()) {
    __ Mov(Register(destination), 0);
  } else if (constant->IsFloatConstant()) {
    __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
  } else {
    DCHECK(constant->IsDoubleConstant());
    __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
  }
}


static bool CoherentConstantAndType(Location constant, DataType::Type type) {
  DCHECK(constant.IsConstant());
  HConstant* cst = constant.GetConstant();
  return (cst->IsIntConstant() && type == DataType::Type::kInt32) ||
         // Null is mapped to a core W register, which we associate with kPrimInt.
         (cst->IsNullConstant() && type == DataType::Type::kInt32) ||
         (cst->IsLongConstant() && type == DataType::Type::kInt64) ||
         (cst->IsFloatConstant() && type == DataType::Type::kFloat32) ||
         (cst->IsDoubleConstant() && type == DataType::Type::kFloat64);
}

// Allocate a scratch register from the VIXL pool, querying first
// the floating-point register pool, and then the core register
// pool. This is essentially a reimplementation of
// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
// using a different allocation strategy.
static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
                                                    vixl::aarch64::UseScratchRegisterScope* temps,
                                                    int size_in_bits) {
  return masm->GetScratchFPRegisterList()->IsEmpty()
      ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
      : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
}
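
// A typical sequence (illustrative): the first request is served from the FP
// scratch list (d31), and a subsequent request, finding that list empty,
// falls back to the core scratch list (ip0/ip1).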

void CodeGeneratorARM64::MoveLocation(Location destination,
                                      Location source,
                                      DataType::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves. In debug mode we also
  // check the coherency of the locations and the type.
  bool unspecified_type = (dst_type == DataType::Type::kVoid);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we choose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
      }
    }
    DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
    CPURegister dst = CPURegisterFrom(destination, dst_type);
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    } else if (source.IsSIMDStackSlot()) {
      __ Ldr(QRegisterFrom(destination), StackOperandFrom(source));
    } else if (source.IsConstant()) {
      DCHECK(CoherentConstantAndType(source, dst_type));
      MoveConstant(dst, source.GetConstant());
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        __ Mov(Register(dst), RegisterFrom(source, dst_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        DataType::Type source_type = DataType::Is64BitType(dst_type)
            ? DataType::Type::kInt64
            : DataType::Type::kInt32;
        __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
      }
    } else {
      DCHECK(source.IsFpuRegister());
      if (destination.IsRegister()) {
        DataType::Type source_type = DataType::Is64BitType(dst_type)
            ? DataType::Type::kFloat64
            : DataType::Type::kFloat32;
        __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
      } else {
        DCHECK(destination.IsFpuRegister());
        if (GetGraph()->HasSIMD()) {
          __ Mov(QRegisterFrom(destination), QRegisterFrom(source));
        } else {
          __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ Str(QRegisterFrom(source), StackOperandFrom(destination));
    } else {
      DCHECK(source.IsSIMDStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      if (GetVIXLAssembler()->GetScratchFPRegisterList()->IsEmpty()) {
        Register temp = temps.AcquireX();
        __ Ldr(temp, MemOperand(sp, source.GetStackIndex()));
        __ Str(temp, MemOperand(sp, destination.GetStackIndex()));
        __ Ldr(temp, MemOperand(sp, source.GetStackIndex() + kArm64WordSize));
        __ Str(temp, MemOperand(sp, destination.GetStackIndex() + kArm64WordSize));
      } else {
        FPRegister temp = temps.AcquireVRegisterOfSize(kQRegSize);
        __ Ldr(temp, StackOperandFrom(source));
        __ Str(temp, StackOperandFrom(destination));
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
        } else {
          dst_type =
              destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
      __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
    } else if (source.IsConstant()) {
      DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
          << source << " " << dst_type;
      UseScratchRegisterScope temps(GetVIXLAssembler());
      HConstant* src_cst = source.GetConstant();
      CPURegister temp;
      if (src_cst->IsZeroBitPattern()) {
        temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
            ? Register(xzr)
            : Register(wzr);
      } else {
        if (src_cst->IsIntConstant()) {
          temp = temps.AcquireW();
        } else if (src_cst->IsLongConstant()) {
          temp = temps.AcquireX();
        } else if (src_cst->IsFloatConstant()) {
          temp = temps.AcquireS();
        } else {
          DCHECK(src_cst->IsDoubleConstant());
          temp = temps.AcquireD();
        }
        MoveConstant(temp, src_cst);
      }
      __ Str(temp, StackOperandFrom(destination));
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
      UseScratchRegisterScope temps(GetVIXLAssembler());
      // Use any scratch register (a core or a floating-point one)
      // from VIXL scratch register pools as a temporary.
      //
      // We used to only use the FP scratch register pool, but in some
      // rare cases the only register from this pool (D31) would
      // already be used (e.g. within a ParallelMove instruction, when
      // a move is blocked by another move requiring a scratch FP
      // register, which would reserve D31). To prevent this issue, we
      // ask for a scratch register of any type (core or FP).
      //
      // Also, we start by asking for an FP scratch register first, as the
      // demand for scratch core registers is higher. This is why we
      // use AcquireFPOrCoreCPURegisterOfSize instead of
      // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
      // allocates core scratch registers first.
      CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
          GetVIXLAssembler(),
          &temps,
          (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}

void CodeGeneratorARM64::Load(DataType::Type type,
                              CPURegister dst,
                              const MemOperand& src) {
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      __ Ldrb(Register(dst), src);
      break;
    case DataType::Type::kInt8:
      __ Ldrsb(Register(dst), src);
      break;
    case DataType::Type::kUint16:
      __ Ldrh(Register(dst), src);
      break;
    case DataType::Type::kInt16:
      __ Ldrsh(Register(dst), src);
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
      __ Ldr(dst, src);
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
                                     CPURegister dst,
                                     const MemOperand& src,
                                     bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);
  Register temp_base = temps.AcquireX();
  DataType::Type type = instruction->GetType();

  DCHECK(!src.IsPreIndex());
  DCHECK(!src.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle MemOperand.
  __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
  {
    // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
    MemOperand base = MemOperand(temp_base);
    switch (type) {
      case DataType::Type::kBool:
      case DataType::Type::kUint8:
      case DataType::Type::kInt8:
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldarb(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        if (type == DataType::Type::kInt8) {
          __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
        }
        break;
      case DataType::Type::kUint16:
      case DataType::Type::kInt16:
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldarh(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        if (type == DataType::Type::kInt16) {
          __ Sbfx(Register(dst), Register(dst), 0, DataType::Size(type) * kBitsPerByte);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kReference:
      case DataType::Type::kInt64:
        DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldar(Register(dst), base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64: {
        DCHECK(dst.IsFPRegister());
        DCHECK_EQ(dst.Is64Bits(), DataType::Is64BitType(type));

        Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        {
          ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
          __ ldar(temp, base);
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
        __ Fmov(FPRegister(dst), temp);
        break;
      }
      case DataType::Type::kUint32:
      case DataType::Type::kUint64:
      case DataType::Type::kVoid:
        LOG(FATAL) << "Unreachable type " << type;
    }
  }
}
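
// Note on the sequences above: `ldarb`/`ldarh` zero-extend into the 32-bit
// destination, so the signed narrow types (kInt8, kInt16) need the explicit
// `Sbfx` sign extension; the acquire ordering itself comes from the
// load-acquire instructions.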

void CodeGeneratorARM64::Store(DataType::Type type,
                               CPURegister src,
                               const MemOperand& dst) {
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      __ Strb(Register(src), dst);
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      __ Strh(Register(src), dst);
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      __ Str(src, dst);
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}

void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
                                      DataType::Type type,
                                      CPURegister src,
                                      const MemOperand& dst,
                                      bool needs_null_check) {
  MacroAssembler* masm = GetVIXLAssembler();
  UseScratchRegisterScope temps(GetVIXLAssembler());
  Register temp_base = temps.AcquireX();

  DCHECK(!dst.IsPreIndex());
  DCHECK(!dst.IsPostIndex());

  // TODO(vixl): Let the MacroAssembler handle this.
  Operand op = OperandFromMemOperand(dst);
  __ Add(temp_base, dst.GetBaseRegister(), op);
  MemOperand base = MemOperand(temp_base);
  // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrb(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlrh(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kReference:
    case DataType::Type::kInt64:
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(Register(src), base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      DCHECK_EQ(src.Is64Bits(), DataType::Is64BitType(type));
      Register temp_src;
      if (src.IsZero()) {
        // The zero register is used to avoid synthesizing zero constants.
        temp_src = Register(src);
      } else {
        DCHECK(src.IsFPRegister());
        temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
        __ Fmov(temp_src, FPRegister(src));
      }
      {
        ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
        __ stlr(temp_src, base);
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      }
      break;
    }
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }
}
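
// Note: `stlr` only takes general-purpose source registers, which is why the
// floating-point cases above first move the value to a core temp with `Fmov`
// (or reuse the zero register directly when storing a zero bit pattern).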
1969
Calin Juravle175dc732015-08-25 15:42:32 +01001970void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1971 HInstruction* instruction,
1972 uint32_t dex_pc,
1973 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001974 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00001975
1976 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
1977 {
1978 // Ensure the pc position is recorded immediately after the `blr` instruction.
1979 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
1980 __ blr(lr);
1981 if (EntrypointRequiresStackMap(entrypoint)) {
1982 RecordPcInfo(instruction, dex_pc, slow_path);
1983 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001984 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001985}
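
// The sequence InvokeRuntime produces, with an illustrative entrypoint
// offset (`tr` is ART's dedicated thread register on arm64):
//   ldr lr, [tr, #entrypoint_offset]   // fetch the entrypoint from Thread
//   blr lr                             // the pc after this keys the stack map
// Wrapping only the blr in an ExactAssemblyScope keeps literal/veneer pools
// from landing between the call and RecordPcInfo.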
1986
Roland Levillaindec8f632016-07-22 17:10:06 +01001987void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1988 HInstruction* instruction,
1989 SlowPathCode* slow_path) {
1990 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01001991 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1992 __ Blr(lr);
1993}
1994
Alexandre Rames67555f72014-11-18 10:55:16 +00001995void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001996 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001997 UseScratchRegisterScope temps(GetVIXLAssembler());
1998 Register temp = temps.AcquireW();
Vladimir Markodc682aa2018-01-04 18:42:57 +00001999 constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
2000 const size_t status_byte_offset =
2001 mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
2002 constexpr uint32_t shifted_initialized_value =
2003 enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002004
Serban Constantinescu02164b32014-11-13 14:05:07 +00002005 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002006 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Vladimir Markodc682aa2018-01-04 18:42:57 +00002007 __ Add(temp, class_reg, status_byte_offset);
Igor Murashkin86083f72017-10-27 10:59:04 -07002008 __ Ldarb(temp, HeapOperand(temp));
Vladimir Markodc682aa2018-01-04 18:42:57 +00002009 __ Cmp(temp, shifted_initialized_value);
Vladimir Marko2c64a832018-01-04 11:31:56 +00002010 __ B(lo, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00002011 __ Bind(slow_path->GetExitLabel());
2012}
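
// Illustrative expansion of the check above (w16 stands in for the acquired
// scratch register; the immediates depend on the mirror::Class layout):
//   add w16, <class_reg>, #status_byte_offset
//   ldarb w16, [x16]                   // acquire-load just the status byte
//   cmp w16, #shifted_initialized_value
//   b.lo <class-init slow path>        // not initialized yet: call runtime
// The acquire load pairs with the release store done when the class status
// is updated, which is the "consistent memory ordering" noted above.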
Alexandre Rames5319def2014-10-23 10:03:10 +01002013
Vladimir Marko175e7862018-03-27 09:03:13 +00002014void InstructionCodeGeneratorARM64::GenerateBitstringTypeCheckCompare(
2015 HTypeCheckInstruction* check, vixl::aarch64::Register temp) {
2016 uint32_t path_to_root = check->GetBitstringPathToRoot();
2017 uint32_t mask = check->GetBitstringMask();
2018 DCHECK(IsPowerOfTwo(mask + 1));
2019 size_t mask_bits = WhichPowerOf2(mask + 1);
2020
2021 if (mask_bits == 16u) {
2022 // Load only the bitstring part of the status word.
2023 __ Ldrh(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
2024 } else {
2025 // /* uint32_t */ temp = temp->status_
2026 __ Ldr(temp, HeapOperand(temp, mirror::Class::StatusOffset()));
2027 // Extract the bitstring bits.
2028 __ Ubfx(temp, temp, 0, mask_bits);
2029 }
2030 // Compare the bitstring bits to `path_to_root`.
2031 __ Cmp(temp, path_to_root);
2032}
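
// Worked example for the compare above (values illustrative): with
// mask == 0xffff, mask + 1 is a power of two and mask_bits == 16, so the
// bitstring is loaded directly with ldrh; with mask == 0xff, mask_bits == 8
// and ubfx extracts bits [7:0] of the status word before the cmp against
// path_to_root.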
2033
Roland Levillain44015862016-01-22 11:47:17 +00002034void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002035 BarrierType type = BarrierAll;
2036
2037 switch (kind) {
2038 case MemBarrierKind::kAnyAny:
2039 case MemBarrierKind::kAnyStore: {
2040 type = BarrierAll;
2041 break;
2042 }
2043 case MemBarrierKind::kLoadAny: {
2044 type = BarrierReads;
2045 break;
2046 }
2047 case MemBarrierKind::kStoreStore: {
2048 type = BarrierWrites;
2049 break;
2050 }
2051 default:
2052 LOG(FATAL) << "Unexpected memory barrier " << kind;
2053 }
2054 __ Dmb(InnerShareable, type);
2055}
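
// Resulting instructions, all in the inner-shareable domain: kLoadAny lowers
// to `dmb ishld`, kStoreStore to `dmb ishst`, and kAnyAny / kAnyStore to a
// full `dmb ish`.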
2056
Serban Constantinescu02164b32014-11-13 14:05:07 +00002057void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
2058 HBasicBlock* successor) {
2059 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002060 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
2061 if (slow_path == nullptr) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01002062 slow_path =
2063 new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathARM64(instruction, successor);
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002064 instruction->SetSlowPath(slow_path);
2065 codegen_->AddSlowPath(slow_path);
2066 if (successor != nullptr) {
2067 DCHECK(successor->IsLoopHeader());
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01002068 }
2069 } else {
2070 DCHECK_EQ(slow_path->GetSuccessor(), successor);
2071 }
2072
Serban Constantinescu02164b32014-11-13 14:05:07 +00002073 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
2074 Register temp = temps.AcquireW();
2075
Andreas Gampe542451c2016-07-26 09:02:02 -07002076 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002077 if (successor == nullptr) {
2078 __ Cbnz(temp, slow_path->GetEntryLabel());
2079 __ Bind(slow_path->GetReturnLabel());
2080 } else {
2081 __ Cbz(temp, codegen_->GetLabelOf(successor));
2082 __ B(slow_path->GetEntryLabel());
2083 // slow_path will return to GetLabelOf(successor).
2084 }
2085}
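
// Sketch of the emitted suspend check for the no-successor case (scratch
// register name illustrative):
//   ldrh w16, [tr, #thread_flags_offset]
//   cbnz w16, <SuspendCheckSlowPath entry>   // any flag set: go to runtime
// with the slow path returning to the label bound right after the cbnz.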
2086
Alexandre Rames5319def2014-10-23 10:03:10 +01002087InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
2088 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08002089 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01002090 assembler_(codegen->GetAssembler()),
2091 codegen_(codegen) {}
2092
Alexandre Rames67555f72014-11-18 10:55:16 +00002093void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002094 DCHECK_EQ(instr->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002095 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002096 DataType::Type type = instr->GetResultType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002097 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002098 case DataType::Type::kInt32:
2099 case DataType::Type::kInt64:
Alexandre Rames5319def2014-10-23 10:03:10 +01002100 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002101 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002102 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002103 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002104
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002105 case DataType::Type::kFloat32:
2106 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002107 locations->SetInAt(0, Location::RequiresFpuRegister());
2108 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002109 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002110 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002111
Alexandre Rames5319def2014-10-23 10:03:10 +01002112 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002113 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002114 }
2115}
2116
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002117void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction,
2118 const FieldInfo& field_info) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002119 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
2120
2121 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002122 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Rames09a99962015-04-15 11:47:56 +01002123 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002124 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2125 object_field_get_with_read_barrier
2126 ? LocationSummary::kCallOnSlowPath
2127 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002128 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002129 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko0ecac682018-08-07 10:40:38 +01002130 // We need a temporary register for the read barrier load in
2131 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
2132 // only if the field is volatile or the offset is too big.
2133 if (field_info.IsVolatile() ||
2134 field_info.GetFieldOffset().Uint32Value() >= kReferenceLoadMinFarOffset) {
2135 locations->AddTemp(FixedTempLocation());
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002136 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002137 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002138 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002139 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002140 locations->SetOut(Location::RequiresFpuRegister());
2141 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002142 // The output overlaps for an object field get when read barriers
2143 // are enabled: we do not want the load to overwrite the object's
2144 // location, as we need it to emit the read barrier.
2145 locations->SetOut(
2146 Location::RequiresRegister(),
2147 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01002148 }
2149}
2150
2151void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
2152 const FieldInfo& field_info) {
2153 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00002154 LocationSummary* locations = instruction->GetLocations();
2155 Location base_loc = locations->InAt(0);
2156 Location out = locations->Out();
2157 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Vladimir Marko61b92282017-10-11 13:23:17 +01002158 DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
2159 DataType::Type load_type = instruction->GetType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002160 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01002161
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002162 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier &&
Vladimir Marko61b92282017-10-11 13:23:17 +01002163 load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002164 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002165 // /* HeapReference<Object> */ out = *(base + offset)
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002166 Register base = RegisterFrom(base_loc, DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002167 Location maybe_temp =
2168 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
Roland Levillain44015862016-01-22 11:47:17 +00002169 // Note that potential implicit null checks are handled in this
2170 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
2171 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2172 instruction,
2173 out,
2174 base,
2175 offset,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002176 maybe_temp,
Roland Levillain44015862016-01-22 11:47:17 +00002177 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002178 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00002179 } else {
2180 // General case.
2181 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002182 // Note that a potential implicit null check is handled in this
2183 // CodeGeneratorARM64::LoadAcquire call.
2184 // NB: LoadAcquire will record the pc info if needed.
2185 codegen_->LoadAcquire(
2186 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01002187 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002188      // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
2189 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Vladimir Marko61b92282017-10-11 13:23:17 +01002190 codegen_->Load(load_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01002191 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01002192 }
Vladimir Marko61b92282017-10-11 13:23:17 +01002193 if (load_type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002194 // If read barriers are enabled, emit read barriers other than
2195 // Baker's using a slow path (and also unpoison the loaded
2196 // reference, if heap poisoning is enabled).
2197 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
2198 }
Roland Levillain4d027112015-07-01 15:41:14 +01002199 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002200}
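
// For the volatile non-reference path above, LoadAcquire mirrors
// StoreRelease: it forms the address in a scratch register and issues a
// single ldar/ldarh/ldarb, so the pc recorded for the implicit null check
// lands on that exact instruction.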
2201
2202void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
2203 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002204 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames09a99962015-04-15 11:47:56 +01002205 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002206 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
2207 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002208 } else if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002209 locations->SetInAt(1, Location::RequiresFpuRegister());
2210 } else {
2211 locations->SetInAt(1, Location::RequiresRegister());
2212 }
2213}
2214
2215void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002216 const FieldInfo& field_info,
2217 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002218 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2219
2220 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002221 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01002222 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01002223 Offset offset = field_info.GetFieldOffset();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002224 DataType::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002225
Roland Levillain4d027112015-07-01 15:41:14 +01002226 {
2227 // We use a block to end the scratch scope before the write barrier, thus
2228 // freeing the temporary registers so they can be used in `MarkGCCard`.
2229 UseScratchRegisterScope temps(GetVIXLAssembler());
2230
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002231 if (kPoisonHeapReferences && field_type == DataType::Type::kReference) {
Roland Levillain4d027112015-07-01 15:41:14 +01002232 DCHECK(value.IsW());
2233 Register temp = temps.AcquireW();
2234 __ Mov(temp, value.W());
2235 GetAssembler()->PoisonHeapReference(temp.W());
2236 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01002237 }
Roland Levillain4d027112015-07-01 15:41:14 +01002238
2239 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00002240 codegen_->StoreRelease(
2241 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01002242 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002243      // Ensure that no pools are emitted between the store and MaybeRecordImplicitNullCheck.
2244 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002245 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2246 codegen_->MaybeRecordImplicitNullCheck(instruction);
2247 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002248 }
2249
2250 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002251 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002252 }
2253}
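
// Example flow for a volatile reference store with heap poisoning enabled
// (a sketch; register names illustrative):
//   mov w16, <value>
//   neg w16, w16             // PoisonHeapReference stores references negated
//   <store-release of w16 as in StoreRelease>
// followed by MarkGCCard, which dirties the card table entry for `obj` and,
// when value_can_be_null, skips the marking via a cbz on the value.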
2254
Alexandre Rames67555f72014-11-18 10:55:16 +00002255void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002256 DataType::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002257
2258 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002259 case DataType::Type::kInt32:
2260 case DataType::Type::kInt64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002261 Register dst = OutputRegister(instr);
2262 Register lhs = InputRegisterAt(instr, 0);
2263 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002264 if (instr->IsAdd()) {
2265 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002266 } else if (instr->IsAnd()) {
2267 __ And(dst, lhs, rhs);
2268 } else if (instr->IsOr()) {
2269 __ Orr(dst, lhs, rhs);
2270 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002271 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002272 } else if (instr->IsRor()) {
2273 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002274 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002275 __ Ror(dst, lhs, shift);
2276 } else {
2277          // Ensure the shift distance is in a register of the same size as the
2278          // result. If we are rotating a long and the shift distance arrives in a
2279          // W register, we don't need to sxtw it for use as an X register: shift
2280          // distances are always taken modulo the register width (& (reg_bits - 1)).
2281 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
2282 }
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002283 } else if (instr->IsMin() || instr->IsMax()) {
2284 __ Cmp(lhs, rhs);
2285 __ Csel(dst, lhs, rhs, instr->IsMin() ? lt : gt);
Alexandre Rames67555f72014-11-18 10:55:16 +00002286 } else {
2287 DCHECK(instr->IsXor());
2288 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002289 }
2290 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002291 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002292 case DataType::Type::kFloat32:
2293 case DataType::Type::kFloat64: {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002294 FPRegister dst = OutputFPRegister(instr);
2295 FPRegister lhs = InputFPRegisterAt(instr, 0);
2296 FPRegister rhs = InputFPRegisterAt(instr, 1);
2297 if (instr->IsAdd()) {
2298 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002299 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002300 __ Fsub(dst, lhs, rhs);
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01002301 } else if (instr->IsMin()) {
2302 __ Fmin(dst, lhs, rhs);
2303 } else if (instr->IsMax()) {
2304 __ Fmax(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002305 } else {
2306 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002307 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002308 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002309 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002310 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002311 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002312 }
2313}
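
// Example lowerings from the integer path above (registers illustrative):
// an Int32 Min becomes `cmp w0, w1; csel w2, w0, w1, lt`, an immediate Ror
// becomes `ror w2, w0, #(imm & 31)`, and the remaining operators map
// directly to add/and/orr/sub/eor.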
2314
Serban Constantinescu02164b32014-11-13 14:05:07 +00002315void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2316 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2317
Vladimir Markoca6fff82017-10-03 14:49:14 +01002318 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002319 DataType::Type type = instr->GetResultType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002320 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002321 case DataType::Type::kInt32:
2322 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002323 locations->SetInAt(0, Location::RequiresRegister());
2324 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Artem Serov87c97052016-09-23 13:34:31 +01002325 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002326 break;
2327 }
2328 default:
2329 LOG(FATAL) << "Unexpected shift type " << type;
2330 }
2331}
2332
2333void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2334 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2335
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002336 DataType::Type type = instr->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002337 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002338 case DataType::Type::kInt32:
2339 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002340 Register dst = OutputRegister(instr);
2341 Register lhs = InputRegisterAt(instr, 0);
2342 Operand rhs = InputOperandAt(instr, 1);
2343 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002344 uint32_t shift_value = rhs.GetImmediate() &
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002345 (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002346 if (instr->IsShl()) {
2347 __ Lsl(dst, lhs, shift_value);
2348 } else if (instr->IsShr()) {
2349 __ Asr(dst, lhs, shift_value);
2350 } else {
2351 __ Lsr(dst, lhs, shift_value);
2352 }
2353 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002354 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002355
2356 if (instr->IsShl()) {
2357 __ Lsl(dst, lhs, rhs_reg);
2358 } else if (instr->IsShr()) {
2359 __ Asr(dst, lhs, rhs_reg);
2360 } else {
2361 __ Lsr(dst, lhs, rhs_reg);
2362 }
2363 }
2364 break;
2365 }
2366 default:
2367 LOG(FATAL) << "Unexpected shift operation type " << type;
2368 }
2369}
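
// Worked example: an Int32 Shl by constant 33 masks the distance to
// 33 & 31 == 1, matching Java shift semantics, so the code above emits
// `lsl w0, w1, #1`; Int64 shifts mask with 63 instead.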
2370
Alexandre Rames5319def2014-10-23 10:03:10 +01002371void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002372 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002373}
2374
2375void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002376 HandleBinaryOp(instruction);
2377}
2378
2379void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2380 HandleBinaryOp(instruction);
2381}
2382
2383void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2384 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002385}
2386
Artem Serov7fc63502016-02-09 17:15:29 +00002387void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002388 DCHECK(DataType::IsIntegralType(instr->GetType())) << instr->GetType();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002389 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002390 locations->SetInAt(0, Location::RequiresRegister());
2391 // There is no immediate variant of negated bitwise instructions in AArch64.
2392 locations->SetInAt(1, Location::RequiresRegister());
2393 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2394}
2395
Artem Serov7fc63502016-02-09 17:15:29 +00002396void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002397 Register dst = OutputRegister(instr);
2398 Register lhs = InputRegisterAt(instr, 0);
2399 Register rhs = InputRegisterAt(instr, 1);
2400
2401 switch (instr->GetOpKind()) {
2402 case HInstruction::kAnd:
2403 __ Bic(dst, lhs, rhs);
2404 break;
2405 case HInstruction::kOr:
2406 __ Orn(dst, lhs, rhs);
2407 break;
2408 case HInstruction::kXor:
2409 __ Eon(dst, lhs, rhs);
2410 break;
2411 default:
2412 LOG(FATAL) << "Unreachable";
2413 }
2414}
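
// Mapping used above: kAnd lowers to `bic d, n, m` (d = n & ~m), kOr to
// `orn d, n, m` (d = n | ~m), and kXor to `eon d, n, m` (d = n ^ ~m); these
// forms have no immediate encoding, which is why both inputs must be in
// registers.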
2415
Anton Kirilov74234da2017-01-13 14:42:47 +00002416void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2417 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002418 DCHECK(instruction->GetType() == DataType::Type::kInt32 ||
2419 instruction->GetType() == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002420 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002421 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames8626b742015-11-25 16:28:08 +00002422 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2423 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2424 } else {
2425 locations->SetInAt(0, Location::RequiresRegister());
2426 }
2427 locations->SetInAt(1, Location::RequiresRegister());
2428 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2429}
2430
Anton Kirilov74234da2017-01-13 14:42:47 +00002431void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2432 HDataProcWithShifterOp* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002433 DataType::Type type = instruction->GetType();
Alexandre Rames8626b742015-11-25 16:28:08 +00002434 HInstruction::InstructionKind kind = instruction->GetInstrKind();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002435 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Alexandre Rames8626b742015-11-25 16:28:08 +00002436 Register out = OutputRegister(instruction);
2437 Register left;
2438 if (kind != HInstruction::kNeg) {
2439 left = InputRegisterAt(instruction, 0);
2440 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002441 // If this `HDataProcWithShifterOp` was created by merging a type conversion as the
Alexandre Rames8626b742015-11-25 16:28:08 +00002442 // shifter operand operation, the IR generating `right_reg` (input to the type
2443 // conversion) can have a different type from the current instruction's type,
2444 // so we manually indicate the type.
2445 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002446 Operand right_operand(0);
2447
Anton Kirilov74234da2017-01-13 14:42:47 +00002448 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2449 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002450 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2451 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002452 right_operand = Operand(right_reg,
2453 helpers::ShiftFromOpKind(op_kind),
2454 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002455 }
2456
2457 // Logical binary operations do not support extension operations in the
2458  // operand. Note that VIXL would still manage if such an operand were passed,
2459  // by generating the extension as a separate instruction.
2460 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2461 DCHECK(!right_operand.IsExtendedRegister() ||
2462 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2463 kind != HInstruction::kNeg));
2464 switch (kind) {
2465 case HInstruction::kAdd:
2466 __ Add(out, left, right_operand);
2467 break;
2468 case HInstruction::kAnd:
2469 __ And(out, left, right_operand);
2470 break;
2471 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002472 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002473 __ Neg(out, right_operand);
2474 break;
2475 case HInstruction::kOr:
2476 __ Orr(out, left, right_operand);
2477 break;
2478 case HInstruction::kSub:
2479 __ Sub(out, left, right_operand);
2480 break;
2481 case HInstruction::kXor:
2482 __ Eor(out, left, right_operand);
2483 break;
2484 default:
2485 LOG(FATAL) << "Unexpected operation kind: " << kind;
2486 UNREACHABLE();
2487 }
2488}
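
// Example (illustrative): an Add whose right operand was merged with a
// left-shift-by-2 becomes the single instruction `add w0, w1, w2, lsl #2`,
// and one merged with an int-to-long conversion becomes
// `add x0, x1, w2, sxtw`; the shift or extension rides along as the shifter
// operand instead of costing a separate instruction.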
2489
Artem Serov328429f2016-07-06 16:23:04 +01002490void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002491 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002492 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002493 locations->SetInAt(0, Location::RequiresRegister());
2494 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
Artem Serov87c97052016-09-23 13:34:31 +01002495 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002496}
2497
Roland Levillain19c54192016-11-04 13:44:09 +00002498void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002499 __ Add(OutputRegister(instruction),
2500 InputRegisterAt(instruction, 0),
2501 Operand(InputOperandAt(instruction, 1)));
2502}
2503
Artem Serove1811ed2017-04-27 16:50:47 +01002504void LocationsBuilderARM64::VisitIntermediateAddressIndex(HIntermediateAddressIndex* instruction) {
2505 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002506 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Artem Serove1811ed2017-04-27 16:50:47 +01002507
2508 HIntConstant* shift = instruction->GetShift()->AsIntConstant();
2509
2510 locations->SetInAt(0, Location::RequiresRegister());
2511  // For the byte case we don't need to shift the index variable, so we can encode the data
2512  // offset into the ADD instruction. For other cases we prefer the data offset to be in a
2513  // register; that hoists the data-offset constant generation out of the loop and reduces
2514  // the critical path length in the loop.
2515 locations->SetInAt(1, shift->GetValue() == 0
2516 ? Location::ConstantLocation(instruction->GetOffset()->AsIntConstant())
2517 : Location::RequiresRegister());
2518 locations->SetInAt(2, Location::ConstantLocation(shift));
2519 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2520}
2521
2522void InstructionCodeGeneratorARM64::VisitIntermediateAddressIndex(
2523 HIntermediateAddressIndex* instruction) {
2524 Register index_reg = InputRegisterAt(instruction, 0);
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002525 uint32_t shift = Int64FromLocation(instruction->GetLocations()->InAt(2));
Artem Serove1811ed2017-04-27 16:50:47 +01002526 uint32_t offset = instruction->GetOffset()->AsIntConstant()->GetValue();
2527
2528 if (shift == 0) {
2529 __ Add(OutputRegister(instruction), index_reg, offset);
2530 } else {
2531 Register offset_reg = InputRegisterAt(instruction, 1);
2532 __ Add(OutputRegister(instruction), offset_reg, Operand(index_reg, LSL, shift));
2533 }
2534}
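
// Example (illustrative): a char[] element address (shift == 1) becomes
// `add x0, x_offset, x_index, lsl #1`, while for byte[] (shift == 0) the
// constant data offset folds straight into `add x0, x_index, #offset`; that
// is why only the shift == 0 case keeps the offset as a constant location.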
2535
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002536void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002537 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002538 new (GetGraph()->GetAllocator()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002539 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2540 if (instr->GetOpKind() == HInstruction::kSub &&
2541 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002542 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002543    // Don't allocate a register for the Mneg instruction.
2544 } else {
2545 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2546 Location::RequiresRegister());
2547 }
2548 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2549 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002550 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2551}
2552
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002553void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002554 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002555 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2556 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002557
2558 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2559 // This fixup should be carried out for all multiply-accumulate instructions:
2560 // madd, msub, smaddl, smsubl, umaddl and umsubl.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002561 if (instr->GetType() == DataType::Type::kInt64 &&
Alexandre Rames418318f2015-11-20 15:55:47 +00002562 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2563 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002564 vixl::aarch64::Instruction* prev =
2565 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002566 if (prev->IsLoadOrStore()) {
2567      // Make sure we emit exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002568 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002569 __ nop();
2570 }
2571 }
2572
2573 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002574 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002575 __ Madd(res, mul_left, mul_right, accumulator);
2576 } else {
2577 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002578 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002579 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002580 __ Mneg(res, mul_left, mul_right);
2581 } else {
2582 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2583 __ Msub(res, mul_left, mul_right, accumulator);
2584 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002585 }
2586}
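
// Erratum 835769 example (illustrative): if the previously emitted
// instruction was `ldr x3, [x4]`, the fixup above inserts a nop so the
// stream becomes
//   ldr x3, [x4]
//   nop
//   madd x0, x1, x2, x3
// breaking the load-followed-by-multiply-accumulate pattern the Cortex-A53
// erratum is sensitive to.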
2587
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002588void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002589 bool object_array_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002590 kEmitCompilerReadBarrier && (instruction->GetType() == DataType::Type::kReference);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002591 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002592 new (GetGraph()->GetAllocator()) LocationSummary(instruction,
2593 object_array_get_with_read_barrier
2594 ? LocationSummary::kCallOnSlowPath
2595 : LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002596 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002597 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko008e09f32018-08-06 15:42:43 +01002598 if (instruction->GetIndex()->IsConstant()) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002599 // Array loads with constant index are treated as field loads.
Vladimir Marko008e09f32018-08-06 15:42:43 +01002600 // We need a temporary register for the read barrier load in
2601 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier()
2602 // only if the offset is too big.
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002603 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
2604 uint32_t index = instruction->GetIndex()->AsIntConstant()->GetValue();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002605 offset += index << DataType::SizeShift(DataType::Type::kReference);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002606 if (offset >= kReferenceLoadMinFarOffset) {
2607 locations->AddTemp(FixedTempLocation());
2608 }
2609 } else {
Vladimir Marko008e09f32018-08-06 15:42:43 +01002610 // We need a non-scratch temporary for the array data pointer in
2611 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier().
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002612 locations->AddTemp(Location::RequiresRegister());
2613 }
Vladimir Marko70e97462016-08-09 11:04:26 +01002614 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002615 locations->SetInAt(0, Location::RequiresRegister());
2616 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002617 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002618 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2619 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002620 // The output overlaps in the case of an object array get with
2621 // read barriers enabled: we do not want the move to overwrite the
2622 // array's location, as we need it to emit the read barrier.
2623 locations->SetOut(
2624 Location::RequiresRegister(),
2625 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002626 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002627}
2628
2629void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002630 DataType::Type type = instruction->GetType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002631 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002632 LocationSummary* locations = instruction->GetLocations();
2633 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002634 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002635 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002636 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2637 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002638 MacroAssembler* masm = GetVIXLAssembler();
2639 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002640
Roland Levillain19c54192016-11-04 13:44:09 +00002641 // The read barrier instrumentation of object ArrayGet instructions
2642 // does not support the HIntermediateAddress instruction.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002643 DCHECK(!((type == DataType::Type::kReference) &&
Roland Levillain19c54192016-11-04 13:44:09 +00002644 instruction->GetArray()->IsIntermediateAddress() &&
2645 kEmitCompilerReadBarrier));
2646
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002647 if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00002648 // Object ArrayGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002649 // Note that a potential implicit null check is handled in the
2650 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
Vladimir Marko66d691d2017-04-07 17:53:39 +01002651 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002652 if (index.IsConstant()) {
2653 // Array load with a constant index can be treated as a field load.
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002654 offset += Int64FromLocation(index) << DataType::SizeShift(type);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002655 Location maybe_temp =
2656 (locations->GetTempCount() != 0) ? locations->GetTemp(0) : Location::NoLocation();
2657 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2658 out,
2659 obj.W(),
2660 offset,
2661 maybe_temp,
Vladimir Marko66d691d2017-04-07 17:53:39 +01002662 /* needs_null_check */ false,
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002663 /* use_load_acquire */ false);
2664 } else {
2665 Register temp = WRegisterFrom(locations->GetTemp(0));
2666 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko008e09f32018-08-06 15:42:43 +01002667 out, obj.W(), offset, index, temp, /* needs_null_check */ false);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00002668 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002669 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002670 // General case.
2671 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002672 Register length;
2673 if (maybe_compressed_char_at) {
2674 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2675 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002676 {
2677        // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
2678 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2679
2680 if (instruction->GetArray()->IsIntermediateAddress()) {
2681 DCHECK_LT(count_offset, offset);
2682 int64_t adjusted_offset =
2683 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2684 // Note that `adjusted_offset` is negative, so this will be a LDUR.
2685 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2686 } else {
2687 __ Ldr(length, HeapOperand(obj, count_offset));
2688 }
2689 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002690 }
jessicahandojo05765752016-09-09 19:01:32 -07002691 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002692 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002693 if (maybe_compressed_char_at) {
2694 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002695 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2696 "Expecting 0=compressed, 1=uncompressed");
2697 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002698 __ Ldrb(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002699 HeapOperand(obj, offset + Int64FromLocation(index)));
jessicahandojo05765752016-09-09 19:01:32 -07002700 __ B(&done);
2701 __ Bind(&uncompressed_load);
2702 __ Ldrh(Register(OutputCPURegister(instruction)),
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002703 HeapOperand(obj, offset + (Int64FromLocation(index) << 1)));
jessicahandojo05765752016-09-09 19:01:32 -07002704 __ Bind(&done);
2705 } else {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002706 offset += Int64FromLocation(index) << DataType::SizeShift(type);
jessicahandojo05765752016-09-09 19:01:32 -07002707 source = HeapOperand(obj, offset);
2708 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002709 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002710 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002711 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002712 // We do not need to compute the intermediate address from the array: the
2713 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002714 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002715 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002716 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002717 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2718 }
2719 temp = obj;
2720 } else {
2721 __ Add(temp, obj, offset);
2722 }
jessicahandojo05765752016-09-09 19:01:32 -07002723 if (maybe_compressed_char_at) {
2724 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002725 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2726 "Expecting 0=compressed, 1=uncompressed");
2727 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002728 __ Ldrb(Register(OutputCPURegister(instruction)),
2729 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2730 __ B(&done);
2731 __ Bind(&uncompressed_load);
2732 __ Ldrh(Register(OutputCPURegister(instruction)),
2733 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2734 __ Bind(&done);
2735 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002736 source = HeapOperand(temp, XRegisterFrom(index), LSL, DataType::SizeShift(type));
jessicahandojo05765752016-09-09 19:01:32 -07002737 }
Roland Levillain44015862016-01-22 11:47:17 +00002738 }
jessicahandojo05765752016-09-09 19:01:32 -07002739 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002740      // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
2741 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002742 codegen_->Load(type, OutputCPURegister(instruction), source);
2743 codegen_->MaybeRecordImplicitNullCheck(instruction);
2744 }
Roland Levillain44015862016-01-22 11:47:17 +00002745
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002746 if (type == DataType::Type::kReference) {
Roland Levillain44015862016-01-22 11:47:17 +00002747 static_assert(
2748 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2749 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2750 Location obj_loc = locations->InAt(0);
2751 if (index.IsConstant()) {
2752 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2753 } else {
2754 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2755 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002756 }
Roland Levillain4d027112015-07-01 15:41:14 +01002757 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002758}
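
// String compression note for the CharAt paths above: bit 0 of String.count
// is the compression flag (0 means compressed), so the tbnz on bit 0 falls
// through to an ldrb for compressed (8-bit) characters and branches to an
// ldrh for uncompressed (16-bit) ones.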
2759
Alexandre Rames5319def2014-10-23 10:03:10 +01002760void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002761 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002762 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002763 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002764}
2765
2766void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002767 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002768 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002769 {
2770    // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
2771 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2772 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2773 codegen_->MaybeRecordImplicitNullCheck(instruction);
2774 }
jessicahandojo05765752016-09-09 19:01:32 -07002775 // Mask out compression flag from String's array length.
2776 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002777 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002778 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002779}
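
// With compression enabled a String's count field holds
// (length << 1) | compression_flag, so the `lsr #1` above recovers the
// logical length; plain array lengths skip the shift and are used as loaded.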
2780
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002781void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002782 DataType::Type value_type = instruction->GetComponentType();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002783
2784 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Vladimir Markoca6fff82017-10-03 14:49:14 +01002785 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002786 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002787 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002788 LocationSummary::kCallOnSlowPath :
2789 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002790 locations->SetInAt(0, Location::RequiresRegister());
2791 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002792 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2793 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002794 } else if (DataType::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002795 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002796 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002797 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002798 }
2799}
2800
2801void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002802 DataType::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002803 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002804 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002805 bool needs_write_barrier =
2806 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002807
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002808 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002809 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002810 CPURegister source = value;
2811 Location index = locations->InAt(1);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002812 size_t offset = mirror::Array::DataOffset(DataType::Size(value_type)).Uint32Value();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002813 MemOperand destination = HeapOperand(array);
2814 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002815
2816 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002817 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002818 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002819 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002820 destination = HeapOperand(array, offset);
2821 } else {
2822 UseScratchRegisterScope temps(masm);
2823 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002824 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002825 // We do not need to compute the intermediate address from the array: the
2826 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002827 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002828 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002829 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002830 DCHECK(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64() == offset);
2831 }
2832 temp = array;
2833 } else {
2834 __ Add(temp, array, offset);
2835 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002836 destination = HeapOperand(temp,
2837 XRegisterFrom(index),
2838 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002839 DataType::SizeShift(value_type));
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002840 }
Artem Serov914d7a82017-02-07 14:33:49 +00002841 {
2842      // Ensure that no pools are emitted between the store and MaybeRecordImplicitNullCheck.
2843 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2844 codegen_->Store(value_type, value, destination);
2845 codegen_->MaybeRecordImplicitNullCheck(instruction);
2846 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002847 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002848 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002849 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002850 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002851 {
2852 // We use a block to end the scratch scope before the write barrier, thus
2853 // freeing the temporary registers so they can be used in `MarkGCCard`.
2854 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002855 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002856 if (index.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01002857 offset += Int64FromLocation(index) << DataType::SizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002858 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002859 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002860 destination = HeapOperand(temp,
2861 XRegisterFrom(index),
2862 LSL,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002863 DataType::SizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002864 }
2865
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002866 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2867 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2868 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2869
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002870 if (may_need_runtime_call_for_type_check) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01002871 slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathARM64(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002872 codegen_->AddSlowPath(slow_path);
2873 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002874 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002875 __ Cbnz(Register(value), &non_zero);
2876 if (!index.IsConstant()) {
2877 __ Add(temp, array, offset);
2878 }
Artem Serov914d7a82017-02-07 14:33:49 +00002879 {
2880          // Ensure that no pools are emitted between the store and
2881          // MaybeRecordImplicitNullCheck.
2882 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2883 __ Str(wzr, destination);
2884 codegen_->MaybeRecordImplicitNullCheck(instruction);
2885 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002886 __ B(&done);
2887 __ Bind(&non_zero);
2888 }
2889
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002890 // Note that when Baker read barriers are enabled, the type
2891 // checks are performed without read barriers. This is fine,
2892 // even in the case where a class object is in the from-space
2893 // after the flip, as a comparison involving such a type would
2894 // not produce a false positive; it may of course produce a
2895 // false negative, in which case we would take the ArraySet
2896 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01002897
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002898 Register temp2 = temps.AcquireSameSizeAs(array);
2899 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00002900 {
2901 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2902 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2903 __ Ldr(temp, HeapOperand(array, class_offset));
2904 codegen_->MaybeRecordImplicitNullCheck(instruction);
2905 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002906 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01002907
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002908 // /* HeapReference<Class> */ temp = temp->component_type_
2909 __ Ldr(temp, HeapOperand(temp, component_offset));
2910 // /* HeapReference<Class> */ temp2 = value->klass_
2911 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2912 // If heap poisoning is enabled, no need to unpoison `temp`
2913 // nor `temp2`, as we are comparing two poisoned references.
2914 __ Cmp(temp, temp2);
2915 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01002916
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002917 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2918 vixl::aarch64::Label do_put;
2919 __ B(eq, &do_put);
2920 // If heap poisoning is enabled, the `temp` reference has
2921 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002922 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2923
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002924 // /* HeapReference<Class> */ temp = temp->super_class_
2925 __ Ldr(temp, HeapOperand(temp, super_offset));
2926 // If heap poisoning is enabled, no need to unpoison
2927 // `temp`, as we are comparing against null below.
2928 __ Cbnz(temp, slow_path->GetEntryLabel());
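          // Only java.lang.Object has a null super class, so falling through
          // means the component type is Object and any reference can be stored.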
2929 __ Bind(&do_put);
2930 } else {
2931 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002932 }
2933 }
2934
2935 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002936 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002937 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002938 __ Mov(temp2, value.W());
2939 GetAssembler()->PoisonHeapReference(temp2);
2940 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002941 }
2942
2943 if (!index.IsConstant()) {
2944 __ Add(temp, array, offset);
Vladimir Markod1ef8732017-04-18 13:55:13 +01002945 } else {
2946 // We no longer need the `temp` here so release it as the store below may
2947 // need a scratch register (if the constant index makes the offset too large)
2948 // and the poisoned `source` could be using the other scratch register.
2949 temps.Release(temp);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002950 }
Artem Serov914d7a82017-02-07 14:33:49 +00002951 {
2952 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2953 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2954 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002955
Artem Serov914d7a82017-02-07 14:33:49 +00002956 if (!may_need_runtime_call_for_type_check) {
2957 codegen_->MaybeRecordImplicitNullCheck(instruction);
2958 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002959 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002960 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002961
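  // Mark the card for `array` so the concurrent GC revisits it and sees the
  // reference we may have just stored.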
2962 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2963
2964 if (done.IsLinked()) {
2965 __ Bind(&done);
2966 }
2967
2968 if (slow_path != nullptr) {
2969 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002970 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002971 }
2972}
2973
Alexandre Rames67555f72014-11-18 10:55:16 +00002974void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002975 RegisterSet caller_saves = RegisterSet::Empty();
2976 InvokeRuntimeCallingConvention calling_convention;
2977 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
2978 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
2979 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00002980 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002981 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002982}
2983
2984void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002985 BoundsCheckSlowPathARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01002986 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002987 codegen_->AddSlowPath(slow_path);
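  // A single unsigned comparison covers both bounds: a negative index wraps to
  // a large unsigned value, so `hs` (unsigned >=) also sends it to the slow path.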
Alexandre Rames67555f72014-11-18 10:55:16 +00002988 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2989 __ B(slow_path->GetEntryLabel(), hs);
2990}
2991
Alexandre Rames67555f72014-11-18 10:55:16 +00002992void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2993 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01002994 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexandre Rames67555f72014-11-18 10:55:16 +00002995 locations->SetInAt(0, Location::RequiresRegister());
2996 if (check->HasUses()) {
2997 locations->SetOut(Location::SameAsFirstInput());
2998 }
Vladimir Marko3232dbb2018-07-25 15:42:46 +01002999 // Rely on the type initialization to save everything we need.
3000 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Alexandre Rames67555f72014-11-18 10:55:16 +00003001}
3002
3003void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
3004 // We assume the class is not null.
Vladimir Markoa9f303c2018-07-20 16:43:56 +01003005 SlowPathCodeARM64* slow_path =
3006 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(check->GetLoadClass(), check);
Alexandre Rames67555f72014-11-18 10:55:16 +00003007 codegen_->AddSlowPath(slow_path);
3008 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
3009}
3010
Roland Levillain1a653882016-03-18 18:05:57 +00003011static bool IsFloatingPointZeroConstant(HInstruction* inst) {
3012 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
3013 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
3014}
3015
3016void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
3017 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
3018 Location rhs_loc = instruction->GetLocations()->InAt(1);
3019 if (rhs_loc.IsConstant()) {
3020 // 0.0 is the only immediate that can be encoded directly in
3021 // an FCMP instruction.
3022 //
3023 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
3024 // specify that in a floating-point comparison, positive zero
3025 // and negative zero are considered equal, so we can use the
3026 // literal 0.0 for both cases here.
3027 //
3028 // Note however that some methods (Float.equals, Float.compare,
3029 // Float.compareTo, Double.equals, Double.compare,
3030 // Double.compareTo, Math.max, Math.min, StrictMath.max,
3031 // StrictMath.min) consider 0.0 to be (strictly) greater than
3032 // -0.0. So if we ever translate calls to these methods into an
3033 // HCompare instruction, we must handle the -0.0 case with
3034 // care here.
3035 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
3036 __ Fcmp(lhs_reg, 0.0);
3037 } else {
3038 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
3039 }
Roland Levillain7f63c522015-07-13 15:54:55 +00003040}
3041
Serban Constantinescu02164b32014-11-13 14:05:07 +00003042void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003043 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003044 new (GetGraph()->GetAllocator()) LocationSummary(compare, LocationSummary::kNoCall);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003045 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01003046 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003047 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003048 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003049 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003050 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003051 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003052 case DataType::Type::kInt32:
3053 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003054 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00003055 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00003056 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3057 break;
3058 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003059 case DataType::Type::kFloat32:
3060 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003061 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00003062 locations->SetInAt(1,
3063 IsFloatingPointZeroConstant(compare->InputAt(1))
3064 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
3065 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00003066 locations->SetOut(Location::RequiresRegister());
3067 break;
3068 }
3069 default:
3070 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3071 }
3072}
3073
3074void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003075 DataType::Type in_type = compare->InputAt(0)->GetType();
Serban Constantinescu02164b32014-11-13 14:05:07 +00003076
3077 // 0 if: left == right
3078 // 1 if: left > right
3079 // -1 if: left < right
3080 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003081 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003082 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003083 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003084 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003085 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003086 case DataType::Type::kInt32:
3087 case DataType::Type::kInt64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003088 Register result = OutputRegister(compare);
3089 Register left = InputRegisterAt(compare, 0);
3090 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003091 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08003092 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
3093 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
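      // e.g. left = 5, right = 9: `ne` sets result to 1, then `lt` negates it
      // to -1; equal inputs leave result at 0.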
Serban Constantinescu02164b32014-11-13 14:05:07 +00003094 break;
3095 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003096 case DataType::Type::kFloat32:
3097 case DataType::Type::kFloat64: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003098 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00003099 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003100 __ Cset(result, ne);
3101 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01003102 break;
3103 }
3104 default:
3105 LOG(FATAL) << "Unimplemented compare type " << in_type;
3106 }
3107}
3108
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003109void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003110 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00003111
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003112 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003113 locations->SetInAt(0, Location::RequiresFpuRegister());
3114 locations->SetInAt(1,
3115 IsFloatingPointZeroConstant(instruction->InputAt(1))
3116 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
3117 : Location::RequiresFpuRegister());
3118 } else {
3119 // Integer cases.
3120 locations->SetInAt(0, Location::RequiresRegister());
3121 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
3122 }
3123
David Brazdilb3e773e2016-01-26 11:28:37 +00003124 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00003125 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01003126 }
3127}
3128
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003129void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003130 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003131 return;
3132 }
3133
3134 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01003135 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00003136 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01003137
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003138 if (DataType::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00003139 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003140 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00003141 } else {
3142 // Integer cases.
3143 Register lhs = InputRegisterAt(instruction, 0);
3144 Operand rhs = InputOperandAt(instruction, 1);
3145 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00003146 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00003147 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003148}
3149
3150#define FOR_EACH_CONDITION_INSTRUCTION(M) \
3151 M(Equal) \
3152 M(NotEqual) \
3153 M(LessThan) \
3154 M(LessThanOrEqual) \
3155 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07003156 M(GreaterThanOrEqual) \
3157 M(Below) \
3158 M(BelowOrEqual) \
3159 M(Above) \
3160 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01003161#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003162void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
3163void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01003164FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00003165#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01003166#undef FOR_EACH_CONDITION_INSTRUCTION
3167
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003168void InstructionCodeGeneratorARM64::GenerateIntDivForPower2Denom(HDiv* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003169 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003170 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003171 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
3172
3173 Register out = OutputRegister(instruction);
3174 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01003175
3176 if (abs_imm == 2) {
3177 int bits = DataType::Size(instruction->GetResultType()) * kBitsPerByte;
3178 __ Add(out, dividend, Operand(dividend, LSR, bits - 1));
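    // Adding the unsigned sign bit (dividend >>> (bits - 1)) adds 1 only for
    // negative inputs, so the arithmetic shift below truncates toward zero.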
3179 } else {
3180 UseScratchRegisterScope temps(GetVIXLAssembler());
3181 Register temp = temps.AcquireSameSizeAs(out);
3182 __ Add(temp, dividend, abs_imm - 1);
3183 __ Cmp(dividend, 0);
3184 __ Csel(out, temp, dividend, lt);
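    // e.g. dividend = -7, abs_imm = 4: temp = -4 and dividend < 0, so out = -4;
    // the Asr #2 below then yields -1 == -7 / 4 (truncation toward zero).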
3185 }
3186
Zheng Xuc6667102015-05-15 16:08:45 +08003187 int ctz_imm = CTZ(abs_imm);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003188 if (imm > 0) {
3189 __ Asr(out, out, ctz_imm);
Zheng Xuc6667102015-05-15 16:08:45 +08003190 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003191 __ Neg(out, Operand(out, ASR, ctz_imm));
Zheng Xuc6667102015-05-15 16:08:45 +08003192 }
3193}
3194
3195void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3196 DCHECK(instruction->IsDiv() || instruction->IsRem());
3197
3198 LocationSummary* locations = instruction->GetLocations();
3199 Location second = locations->InAt(1);
3200 DCHECK(second.IsConstant());
3201
3202 Register out = OutputRegister(instruction);
3203 Register dividend = InputRegisterAt(instruction, 0);
3204 int64_t imm = Int64FromConstant(second.GetConstant());
3205
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003206 DataType::Type type = instruction->GetResultType();
3207 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Zheng Xuc6667102015-05-15 16:08:45 +08003208
3209 int64_t magic;
3210 int shift;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003211 CalculateMagicAndShiftForDivRem(
3212 imm, type == DataType::Type::kInt64 /* is_long */, &magic, &shift);
Zheng Xuc6667102015-05-15 16:08:45 +08003213
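  // `magic` and `shift` implement the Granlund-Montgomery / Hacker's Delight
  // multiply-by-reciprocal scheme: take the high word of dividend * magic,
  // optionally correct by +/- dividend, shift arithmetically, then add 1 for
  // negative results. A minimal sketch of the int32 case (illustrative only,
  // mirroring the instructions emitted below):
  //
  //   int32_t DivByConstant(int32_t dividend) {
  //     int32_t t = static_cast<int32_t>((int64_t{dividend} * magic) >> 32);
  //     if (imm > 0 && magic < 0) t += dividend;
  //     if (imm < 0 && magic > 0) t -= dividend;
  //     t >>= shift;
  //     return t - (t >> 31);  // t >> 31 is -1 for negative t: rounds to zero.
  //   }
  //
  // E.g. for imm = 3 the standard construction gives magic = 0x55555556 and
  // shift = 0; dividend = -10 yields t = -4, then -4 - (-1) = -3 == -10 / 3.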
3214 UseScratchRegisterScope temps(GetVIXLAssembler());
3215 Register temp = temps.AcquireSameSizeAs(out);
3216
3217 // temp = get_high(dividend * magic)
3218 __ Mov(temp, magic);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003219 if (type == DataType::Type::kInt64) {
Zheng Xuc6667102015-05-15 16:08:45 +08003220 __ Smulh(temp, dividend, temp);
3221 } else {
3222 __ Smull(temp.X(), dividend, temp);
3223 __ Lsr(temp.X(), temp.X(), 32);
3224 }
3225
3226 if (imm > 0 && magic < 0) {
3227 __ Add(temp, temp, dividend);
3228 } else if (imm < 0 && magic > 0) {
3229 __ Sub(temp, temp, dividend);
3230 }
3231
3232 if (shift != 0) {
3233 __ Asr(temp, temp, shift);
3234 }
3235
3236 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003237 __ Sub(out, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003238 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003239 __ Sub(temp, temp, Operand(temp, ASR, type == DataType::Type::kInt64 ? 63 : 31));
Zheng Xuc6667102015-05-15 16:08:45 +08003240 // TODO: Strength reduction for msub.
3241 Register temp_imm = temps.AcquireSameSizeAs(out);
3242 __ Mov(temp_imm, imm);
3243 __ Msub(out, temp, temp_imm, dividend);
3244 }
3245}
3246
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003247void InstructionCodeGeneratorARM64::GenerateIntDivForConstDenom(HDiv *instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003248 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Zheng Xuc6667102015-05-15 16:08:45 +08003249
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003250 if (imm == 0) {
3251 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3252 return;
3253 }
Zheng Xuc6667102015-05-15 16:08:45 +08003254
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003255 if (IsPowerOfTwo(AbsOrMin(imm))) {
3256 GenerateIntDivForPower2Denom(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003257 } else {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003258 // Cases imm == -1 or imm == 1 are handled by InstructionSimplifier.
3259 DCHECK(imm < -2 || imm > 2) << imm;
3260 GenerateDivRemWithAnyConstant(instruction);
3261 }
3262}
3263
3264void InstructionCodeGeneratorARM64::GenerateIntDiv(HDiv *instruction) {
3265 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
3266 << instruction->GetResultType();
3267
3268 if (instruction->GetLocations()->InAt(1).IsConstant()) {
3269 GenerateIntDivForConstDenom(instruction);
3270 } else {
3271 Register out = OutputRegister(instruction);
Zheng Xuc6667102015-05-15 16:08:45 +08003272 Register dividend = InputRegisterAt(instruction, 0);
3273 Register divisor = InputRegisterAt(instruction, 1);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003274 __ Sdiv(out, dividend, divisor);
Zheng Xuc6667102015-05-15 16:08:45 +08003275 }
3276}
3277
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003278void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3279 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003280 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003281 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003282 case DataType::Type::kInt32:
3283 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003284 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003285 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003286 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3287 break;
3288
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003289 case DataType::Type::kFloat32:
3290 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003291 locations->SetInAt(0, Location::RequiresFpuRegister());
3292 locations->SetInAt(1, Location::RequiresFpuRegister());
3293 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3294 break;
3295
3296 default:
3297 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3298 }
3299}
3300
3301void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003302 DataType::Type type = div->GetResultType();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003303 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003304 case DataType::Type::kInt32:
3305 case DataType::Type::kInt64:
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01003306 GenerateIntDiv(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003307 break;
3308
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003309 case DataType::Type::kFloat32:
3310 case DataType::Type::kFloat64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003311 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3312 break;
3313
3314 default:
3315 LOG(FATAL) << "Unexpected div type " << type;
3316 }
3317}
3318
Alexandre Rames67555f72014-11-18 10:55:16 +00003319void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003320 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003321 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003322}
3323
3324void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3325 SlowPathCodeARM64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003326 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003327 codegen_->AddSlowPath(slow_path);
3328 Location value = instruction->GetLocations()->InAt(0);
3329
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003330 DataType::Type type = instruction->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +00003331
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003332 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003333 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003334 return;
3335 }
3336
Alexandre Rames67555f72014-11-18 10:55:16 +00003337 if (value.IsConstant()) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01003338 int64_t divisor = Int64FromLocation(value);
Alexandre Rames67555f72014-11-18 10:55:16 +00003339 if (divisor == 0) {
3340 __ B(slow_path->GetEntryLabel());
3341 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003342 // A division by a non-zero constant is valid. We don't need to perform
3343 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003344 }
3345 } else {
3346 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3347 }
3348}
3349
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003350void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3351 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003352 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003353 locations->SetOut(Location::ConstantLocation(constant));
3354}
3355
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003356void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3357 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003358 // Will be generated at use site.
3359}
3360
Alexandre Rames5319def2014-10-23 10:03:10 +01003361void LocationsBuilderARM64::VisitExit(HExit* exit) {
3362 exit->SetLocations(nullptr);
3363}
3364
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003365void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003366}
3367
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003368void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3369 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003370 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003371 locations->SetOut(Location::ConstantLocation(constant));
3372}
3373
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003374void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003375 // Will be generated at use site.
3376}
3377
David Brazdilfc6a86a2015-06-26 10:33:45 +00003378void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003379 if (successor->IsExitBlock()) {
3380 DCHECK(got->GetPrevious()->AlwaysThrows());
3381 return; // no code needed
3382 }
3383
Serban Constantinescu02164b32014-11-13 14:05:07 +00003384 HBasicBlock* block = got->GetBlock();
3385 HInstruction* previous = got->GetPrevious();
3386 HLoopInformation* info = block->GetLoopInformation();
3387
David Brazdil46e2a392015-03-16 17:31:52 +00003388 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray8d728322018-01-18 22:44:32 +00003389 if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
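    // The ArtMethod* was stored at SP + 0 on frame entry; bump its 16-bit
    // hotness counter in place on this back edge.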
3390 UseScratchRegisterScope temps(GetVIXLAssembler());
3391 Register temp1 = temps.AcquireX();
3392 Register temp2 = temps.AcquireX();
3393 __ Ldr(temp1, MemOperand(sp, 0));
3394 __ Ldrh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3395 __ Add(temp2, temp2, 1);
3396 __ Strh(temp2, MemOperand(temp1, ArtMethod::HotnessCountOffset().Int32Value()));
3397 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003398 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3399 return;
3400 }
3401 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3402 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01003403 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003404 }
3405 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003406 __ B(codegen_->GetLabelOf(successor));
3407 }
3408}
3409
David Brazdilfc6a86a2015-06-26 10:33:45 +00003410void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3411 got->SetLocations(nullptr);
3412}
3413
3414void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3415 HandleGoto(got, got->GetSuccessor());
3416}
3417
3418void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3419 try_boundary->SetLocations(nullptr);
3420}
3421
3422void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3423 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3424 if (!successor->IsExitBlock()) {
3425 HandleGoto(try_boundary, successor);
3426 }
3427}
3428
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003429void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003430 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003431 vixl::aarch64::Label* true_target,
3432 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003433 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003434
David Brazdil0debae72015-11-12 18:37:00 +00003435 if (true_target == nullptr && false_target == nullptr) {
3436 // Nothing to do. The code always falls through.
3437 return;
3438 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003439 // Constant condition, statically compared against "true" (integer value 1).
3440 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003441 if (true_target != nullptr) {
3442 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003443 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003444 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003445 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003446 if (false_target != nullptr) {
3447 __ B(false_target);
3448 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003449 }
David Brazdil0debae72015-11-12 18:37:00 +00003450 return;
3451 }
3452
3453 // The following code generates these patterns:
3454 // (1) true_target == nullptr && false_target != nullptr
3455 // - opposite condition true => branch to false_target
3456 // (2) true_target != nullptr && false_target == nullptr
3457 // - condition true => branch to true_target
3458 // (3) true_target != nullptr && false_target != nullptr
3459 // - condition true => branch to true_target
3460 // - branch to false_target
3461 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003462 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003463 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003464 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003465 if (true_target == nullptr) {
3466 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3467 } else {
3468 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3469 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003470 } else {
3471 // The condition instruction has not been materialized, use its inputs as
3472 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003473 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003474
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003475 DataType::Type type = condition->InputAt(0)->GetType();
3476 if (DataType::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003477 GenerateFcmp(condition);
David Brazdil0debae72015-11-12 18:37:00 +00003478 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003479 IfCondition opposite_condition = condition->GetOppositeCondition();
3480 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003481 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003482 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003483 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003484 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003485 // Integer cases.
3486 Register lhs = InputRegisterAt(condition, 0);
3487 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003488
3489 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003490 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003491 if (true_target == nullptr) {
3492 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3493 non_fallthrough_target = false_target;
3494 } else {
3495 arm64_cond = ARM64Condition(condition->GetCondition());
3496 non_fallthrough_target = true_target;
3497 }
3498
Aart Bik086d27e2016-01-20 17:02:00 -08003499 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003500 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
Roland Levillain7f63c522015-07-13 15:54:55 +00003501 switch (arm64_cond) {
3502 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003503 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003504 break;
3505 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003506 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003507 break;
3508 case lt:
3509 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003510 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
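          // For signed values, x < 0 iff the sign bit is set, so Tbnz on bit
          // 63 (or 31) branches exactly when `lt` against zero holds; `ge`
          // below uses the complementary Tbz.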
Roland Levillain7f63c522015-07-13 15:54:55 +00003511 break;
3512 case ge:
3513 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003514 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003515 break;
3516 default:
3517 // Without the `static_cast` the compiler throws an error for
3518 // `-Werror=sign-promo`.
3519 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3520 }
3521 } else {
3522 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003523 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003524 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003525 }
3526 }
David Brazdil0debae72015-11-12 18:37:00 +00003527
3528 // If neither branch falls through (case 3), the conditional branch to `true_target`
3529 // was already emitted (as in case 2) and we still need to emit a jump to `false_target`.
3530 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003531 __ B(false_target);
3532 }
3533}
3534
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003535void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003536 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003537 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003538 locations->SetInAt(0, Location::RequiresRegister());
3539 }
3540}
3541
3542void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003543 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3544 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003545 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3546 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3547 true_target = nullptr;
3548 }
3549 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3550 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3551 false_target = nullptr;
3552 }
David Brazdil0debae72015-11-12 18:37:00 +00003553 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003554}
3555
3556void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003557 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003558 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01003559 InvokeRuntimeCallingConvention calling_convention;
3560 RegisterSet caller_saves = RegisterSet::Empty();
3561 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
3562 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00003563 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003564 locations->SetInAt(0, Location::RequiresRegister());
3565 }
3566}
3567
3568void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003569 SlowPathCodeARM64* slow_path =
3570 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003571 GenerateTestAndBranch(deoptimize,
3572 /* condition_input_index */ 0,
3573 slow_path->GetEntryLabel(),
3574 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003575}
3576
Mingyao Yang063fc772016-08-02 11:02:54 -07003577void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003578 LocationSummary* locations = new (GetGraph()->GetAllocator())
Mingyao Yang063fc772016-08-02 11:02:54 -07003579 LocationSummary(flag, LocationSummary::kNoCall);
3580 locations->SetOut(Location::RequiresRegister());
3581}
3582
3583void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3584 __ Ldr(OutputRegister(flag),
3585 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3586}
3587
David Brazdilc0b601b2016-02-08 14:20:45 +00003588static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3589 return condition->IsCondition() &&
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003590 DataType::IsFloatingPointType(condition->InputAt(0)->GetType());
David Brazdilc0b601b2016-02-08 14:20:45 +00003591}
3592
Alexandre Rames880f1192016-06-13 16:04:50 +01003593static inline Condition GetConditionForSelect(HCondition* condition) {
3594 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003595 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3596 : ARM64Condition(cond);
3597}
3598
David Brazdil74eb1b22015-12-14 11:44:01 +00003599void LocationsBuilderARM64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003600 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003601 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003602 locations->SetInAt(0, Location::RequiresFpuRegister());
3603 locations->SetInAt(1, Location::RequiresFpuRegister());
Donghui Bai426b49c2016-11-08 14:55:38 +08003604 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames880f1192016-06-13 16:04:50 +01003605 } else {
3606 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3607 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3608 bool is_true_value_constant = cst_true_value != nullptr;
3609 bool is_false_value_constant = cst_false_value != nullptr;
3610 // Ask VIXL whether we should synthesize constants in registers.
3611 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3612 Operand true_op = is_true_value_constant ?
3613 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3614 Operand false_op = is_false_value_constant ?
3615 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3616 bool true_value_in_register = false;
3617 bool false_value_in_register = false;
3618 MacroAssembler::GetCselSynthesisInformation(
3619 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
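    // VIXL reports whether each constant needs materializing: values such as
    // 0, 1 and -1 can be produced directly with csel/csinc/csinv/csneg against
    // the zero register, so those stay as immediates.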
3620 true_value_in_register |= !is_true_value_constant;
3621 false_value_in_register |= !is_false_value_constant;
3622
3623 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3624 : Location::ConstantLocation(cst_true_value));
3625 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3626 : Location::ConstantLocation(cst_false_value));
Donghui Bai426b49c2016-11-08 14:55:38 +08003627 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
David Brazdil74eb1b22015-12-14 11:44:01 +00003628 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003629
David Brazdil74eb1b22015-12-14 11:44:01 +00003630 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3631 locations->SetInAt(2, Location::RequiresRegister());
3632 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003633}
3634
3635void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003636 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003637 Condition csel_cond;
3638
3639 if (IsBooleanValueOrMaterializedCondition(cond)) {
3640 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003641 // Use the condition flags set by the previous instruction.
3642 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003643 } else {
3644 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003645 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003646 }
3647 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003648 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003649 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003650 } else {
3651 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003652 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003653 }
3654
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003655 if (DataType::IsFloatingPointType(select->GetType())) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003656 __ Fcsel(OutputFPRegister(select),
3657 InputFPRegisterAt(select, 1),
3658 InputFPRegisterAt(select, 0),
3659 csel_cond);
3660 } else {
3661 __ Csel(OutputRegister(select),
3662 InputOperandAt(select, 1),
3663 InputOperandAt(select, 0),
3664 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003665 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003666}
3667
David Srbecky0cf44932015-12-09 14:09:59 +00003668void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003669 new (GetGraph()->GetAllocator()) LocationSummary(info);
David Srbecky0cf44932015-12-09 14:09:59 +00003670}
3671
David Srbeckyd28f4a02016-03-14 17:14:24 +00003672void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3673 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003674}
3675
3676void CodeGeneratorARM64::GenerateNop() {
3677 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003678}
3679
Alexandre Rames5319def2014-10-23 10:03:10 +01003680void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Vladimir Markof4f2daa2017-03-20 18:26:59 +00003681 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003682}
3683
3684void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003685 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003686}
3687
3688void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003689 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003690}
3691
3692void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003693 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003694}
3695
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003696// Temp is used for read barrier.
3697static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3698 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003699 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003700 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3701 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3702 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3703 return 1;
3704 }
3705 return 0;
3706}
3707
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003708// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003709// interface pointer, one for loading the current interface.
3710// The other checks have one temp for loading the object's class.
3711static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3712 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3713 return 3;
3714 }
3715 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003716}
3717
Alexandre Rames67555f72014-11-18 10:55:16 +00003718void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003719 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003720 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003721 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003722 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003723 case TypeCheckKind::kExactCheck:
3724 case TypeCheckKind::kAbstractClassCheck:
3725 case TypeCheckKind::kClassHierarchyCheck:
Vladimir Marko87584542017-12-12 17:47:52 +00003726 case TypeCheckKind::kArrayObjectCheck: {
3727 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
3728 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
3729 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003730 break;
Vladimir Marko87584542017-12-12 17:47:52 +00003731 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003732 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003733 case TypeCheckKind::kUnresolvedCheck:
3734 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003735 call_kind = LocationSummary::kCallOnSlowPath;
3736 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00003737 case TypeCheckKind::kBitstringCheck:
3738 break;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003739 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003740
Vladimir Markoca6fff82017-10-03 14:49:14 +01003741 LocationSummary* locations =
3742 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003743 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003744 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003745 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003746 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003747 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3748 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3749 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3750 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3751 } else {
3752 locations->SetInAt(1, Location::RequiresRegister());
3753 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003754 // The "out" register is used as a temporary, so it overlaps with the inputs.
3755 // Note that TypeCheckSlowPathARM64 uses this register too.
3756 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003757 // Add temps if necessary for read barriers.
3758 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003759}
3760
3761void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003762 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003763 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003764 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003765 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00003766 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
3767 ? Register()
3768 : InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003769 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003770 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003771 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3772 DCHECK_LE(num_temps, 1u);
3773 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003774 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3775 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3776 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3777 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003778
Scott Wakeling97c72b72016-06-24 16:19:36 +01003779 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003780 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003781
3782 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003783 // Avoid null check if we know `obj` is not null.
3784 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003785 __ Cbz(obj, &zero);
3786 }
3787
Roland Levillain44015862016-01-22 11:47:17 +00003788 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003789 case TypeCheckKind::kExactCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003790 ReadBarrierOption read_barrier_option =
3791 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003792 // /* HeapReference<Class> */ out = obj->klass_
3793 GenerateReferenceLoadTwoRegisters(instruction,
3794 out_loc,
3795 obj_loc,
3796 class_offset,
3797 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003798 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003799 __ Cmp(out, cls);
3800 __ Cset(out, eq);
3801 if (zero.IsLinked()) {
3802 __ B(&done);
3803 }
3804 break;
3805 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003806
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003807 case TypeCheckKind::kAbstractClassCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003808 ReadBarrierOption read_barrier_option =
3809 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003810 // /* HeapReference<Class> */ out = obj->klass_
3811 GenerateReferenceLoadTwoRegisters(instruction,
3812 out_loc,
3813 obj_loc,
3814 class_offset,
3815 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003816 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003817 // If the class is abstract, we eagerly fetch the super class of the
3818 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003819 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003820 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003821 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003822 GenerateReferenceLoadOneRegister(instruction,
3823 out_loc,
3824 super_offset,
3825 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003826 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003827 // If `out` is null, we use it for the result, and jump to `done`.
3828 __ Cbz(out, &done);
3829 __ Cmp(out, cls);
3830 __ B(ne, &loop);
3831 __ Mov(out, 1);
3832 if (zero.IsLinked()) {
3833 __ B(&done);
3834 }
3835 break;
3836 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003837
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003838 case TypeCheckKind::kClassHierarchyCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003839 ReadBarrierOption read_barrier_option =
3840 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003841 // /* HeapReference<Class> */ out = obj->klass_
3842 GenerateReferenceLoadTwoRegisters(instruction,
3843 out_loc,
3844 obj_loc,
3845 class_offset,
3846 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003847 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003848 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003849 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003850 __ Bind(&loop);
3851 __ Cmp(out, cls);
3852 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003853 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003854 GenerateReferenceLoadOneRegister(instruction,
3855 out_loc,
3856 super_offset,
3857 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003858 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003859 __ Cbnz(out, &loop);
3860 // If `out` is null, we use it for the result, and jump to `done`.
3861 __ B(&done);
3862 __ Bind(&success);
3863 __ Mov(out, 1);
3864 if (zero.IsLinked()) {
3865 __ B(&done);
3866 }
3867 break;
3868 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003869
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003870 case TypeCheckKind::kArrayObjectCheck: {
Vladimir Marko87584542017-12-12 17:47:52 +00003871 ReadBarrierOption read_barrier_option =
3872 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003873 // /* HeapReference<Class> */ out = obj->klass_
3874 GenerateReferenceLoadTwoRegisters(instruction,
3875 out_loc,
3876 obj_loc,
3877 class_offset,
3878 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003879 read_barrier_option);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003880 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003881 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003882 __ Cmp(out, cls);
3883 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003884 // Otherwise, we need to check that the object's class is a non-primitive array.
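      // A class describes an array iff its component type is non-null; for
      // non-primitive (reference) components, primitive_type_ is kPrimNot (0).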
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003885 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003886 GenerateReferenceLoadOneRegister(instruction,
3887 out_loc,
3888 component_offset,
3889 maybe_temp_loc,
Vladimir Marko87584542017-12-12 17:47:52 +00003890 read_barrier_option);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003891 // If `out` is null, we use it for the result, and jump to `done`.
3892 __ Cbz(out, &done);
3893 __ Ldrh(out, HeapOperand(out, primitive_offset));
3894 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3895 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003896 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003897 __ Mov(out, 1);
3898 __ B(&done);
3899 break;
3900 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003901
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003902 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003903 // No read barrier since the slow path will retry upon failure.
3904 // /* HeapReference<Class> */ out = obj->klass_
3905 GenerateReferenceLoadTwoRegisters(instruction,
3906 out_loc,
3907 obj_loc,
3908 class_offset,
3909 maybe_temp_loc,
3910 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003911 __ Cmp(out, cls);
3912 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003913 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
3914 instruction, /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003915 codegen_->AddSlowPath(slow_path);
3916 __ B(ne, slow_path->GetEntryLabel());
3917 __ Mov(out, 1);
3918 if (zero.IsLinked()) {
3919 __ B(&done);
3920 }
3921 break;
3922 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003923
Calin Juravle98893e12015-10-02 21:05:03 +01003924 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003925 case TypeCheckKind::kInterfaceCheck: {
      // Note that we do indeed only call on the slow path, but we always
      // go into the slow path for the unresolved and interface check
      // cases.
3929 //
3930 // We cannot directly call the InstanceofNonTrivial runtime
3931 // entry point without resorting to a type checking slow path
3932 // here (i.e. by calling InvokeRuntime directly), as it would
      // require us to assign fixed registers for the inputs of this
3934 // HInstanceOf instruction (following the runtime calling
3935 // convention), which might be cluttered by the potential first
3936 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003937 //
3938 // TODO: Introduce a new runtime entry point taking the object
3939 // to test (instead of its class) as argument, and let it deal
3940 // with the read barrier issues. This will let us refactor this
3941 // case of the `switch` code as it was previously (with a direct
3942 // call to the runtime not using a type checking slow path).
3943 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003944 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01003945 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
3946 instruction, /* is_fatal */ false);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003947 codegen_->AddSlowPath(slow_path);
3948 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003949 if (zero.IsLinked()) {
3950 __ B(&done);
3951 }
3952 break;
3953 }
Vladimir Marko175e7862018-03-27 09:03:13 +00003954
3955 case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
3957 GenerateReferenceLoadTwoRegisters(instruction,
3958 out_loc,
3959 obj_loc,
3960 class_offset,
3961 maybe_temp_loc,
3962 kWithoutReadBarrier);
3963
3964 GenerateBitstringTypeCheckCompare(instruction, out);
3965 __ Cset(out, eq);
3966 if (zero.IsLinked()) {
3967 __ B(&done);
3968 }
3969 break;
3970 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003971 }
3972
3973 if (zero.IsLinked()) {
3974 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003975 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003976 }
3977
3978 if (done.IsLinked()) {
3979 __ Bind(&done);
3980 }
3981
3982 if (slow_path != nullptr) {
3983 __ Bind(slow_path->GetExitLabel());
3984 }
3985}
3986
3987void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003988 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko87584542017-12-12 17:47:52 +00003989 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01003990 LocationSummary* locations =
3991 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003992 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00003993 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
3994 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
3995 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
3996 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
3997 } else {
3998 locations->SetInAt(1, Location::RequiresRegister());
3999 }
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004000 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
4001 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004002}
4003
4004void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00004005 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004006 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004007 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004008 Register obj = InputRegisterAt(instruction, 0);
Vladimir Marko175e7862018-03-27 09:03:13 +00004009 Register cls = (type_check_kind == TypeCheckKind::kBitstringCheck)
4010 ? Register()
4011 : InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004012 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
4013 DCHECK_GE(num_temps, 1u);
4014 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004015 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004016 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
4017 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004018 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004019 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
4020 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
4021 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
4022 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
4023 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
4024 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
4025 const uint32_t object_array_data_offset =
4026 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004027
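  // A "fatal" type check slow path unconditionally throws and never returns
  // to the compiled code, so it can skip saving and restoring live registers.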
Vladimir Marko87584542017-12-12 17:47:52 +00004028 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004029 SlowPathCodeARM64* type_check_slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01004030 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathARM64(
4031 instruction, is_type_check_slow_path_fatal);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004032 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004033
Scott Wakeling97c72b72016-06-24 16:19:36 +01004034 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004035 // Avoid null check if we know obj is not null.
4036 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01004037 __ Cbz(obj, &done);
4038 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004039
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004040 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004041 case TypeCheckKind::kExactCheck:
4042 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004043 // /* HeapReference<Class> */ temp = obj->klass_
4044 GenerateReferenceLoadTwoRegisters(instruction,
4045 temp_loc,
4046 obj_loc,
4047 class_offset,
4048 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004049 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004050
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004051 __ Cmp(temp, cls);
4052 // Jump to slow path for throwing the exception or doing a
4053 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004054 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004055 break;
4056 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004057
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004058 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004059 // /* HeapReference<Class> */ temp = obj->klass_
4060 GenerateReferenceLoadTwoRegisters(instruction,
4061 temp_loc,
4062 obj_loc,
4063 class_offset,
4064 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004065 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004066
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004067 // If the class is abstract, we eagerly fetch the super class of the
4068 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004069 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004070 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004071 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004072 GenerateReferenceLoadOneRegister(instruction,
4073 temp_loc,
4074 super_offset,
4075 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004076 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004077
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004078 // If the class reference currently in `temp` is null, jump to the slow path to throw the
4079 // exception.
4080 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4081 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004082 __ Cmp(temp, cls);
4083 __ B(ne, &loop);
4084 break;
4085 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004086
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004087 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004088 // /* HeapReference<Class> */ temp = obj->klass_
4089 GenerateReferenceLoadTwoRegisters(instruction,
4090 temp_loc,
4091 obj_loc,
4092 class_offset,
4093 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004094 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004095
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004096 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004097 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004098 __ Bind(&loop);
4099 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004100 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004101
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004102 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004103 GenerateReferenceLoadOneRegister(instruction,
4104 temp_loc,
4105 super_offset,
4106 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004107 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004108
4109 // If the class reference currently in `temp` is not null, jump
      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004111 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004112 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004113 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004114 break;
4115 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004116
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004117 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004118 // /* HeapReference<Class> */ temp = obj->klass_
4119 GenerateReferenceLoadTwoRegisters(instruction,
4120 temp_loc,
4121 obj_loc,
4122 class_offset,
4123 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004124 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004125
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01004126 // Do an exact check.
4127 __ Cmp(temp, cls);
4128 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004129
4130 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004131 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08004132 GenerateReferenceLoadOneRegister(instruction,
4133 temp_loc,
4134 component_offset,
4135 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004136 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004137
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004138 // If the component type is null, jump to the slow path to throw the exception.
4139 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
4140 // Otherwise, the object is indeed an array. Further check that this component type is not a
4141 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004142 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
4143 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08004144 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004145 break;
4146 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004147
Calin Juravle98893e12015-10-02 21:05:03 +01004148 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004149 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004150 //
4151 // We cannot directly call the CheckCast runtime entry point
4152 // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require us to
      // assign fixed registers for the inputs of this HCheckCast
4155 // instruction (following the runtime calling convention), which
4156 // might be cluttered by the potential first read barrier
4157 // emission at the beginning of this method.
4158 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004159 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004160 case TypeCheckKind::kInterfaceCheck: {
4161 // /* HeapReference<Class> */ temp = obj->klass_
4162 GenerateReferenceLoadTwoRegisters(instruction,
4163 temp_loc,
4164 obj_loc,
4165 class_offset,
4166 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004167 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004168
4169 // /* HeapReference<Class> */ temp = temp->iftable_
4170 GenerateReferenceLoadTwoRegisters(instruction,
4171 temp_loc,
4172 temp_loc,
4173 iftable_offset,
4174 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004175 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08004176 // Iftable is never null.
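      // The iftable is a flattened array of (interface class, method array)
      // pairs, so each entry spans two heap references; this is why the loop
      // below advances `temp` by 2 * kHeapReferenceSize per interface.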
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004177 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08004178 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004179 vixl::aarch64::Label start_loop;
4180 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004181 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004182 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
4183 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004184 // Go to next interface.
4185 __ Add(temp, temp, 2 * kHeapReferenceSize);
4186 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08004187 // Compare the classes and continue the loop if they do not match.
4188 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
4189 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07004190 break;
4191 }
Vladimir Marko175e7862018-03-27 09:03:13 +00004192
4193 case TypeCheckKind::kBitstringCheck: {
4194 // /* HeapReference<Class> */ temp = obj->klass_
4195 GenerateReferenceLoadTwoRegisters(instruction,
4196 temp_loc,
4197 obj_loc,
4198 class_offset,
4199 maybe_temp2_loc,
4200 kWithoutReadBarrier);
4201
4202 GenerateBitstringTypeCheckCompare(instruction, temp);
4203 __ B(ne, type_check_slow_path->GetEntryLabel());
4204 break;
4205 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004206 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00004207 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00004208
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004209 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004210}
4211
Alexandre Rames5319def2014-10-23 10:03:10 +01004212void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004213 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01004214 locations->SetOut(Location::ConstantLocation(constant));
4215}
4216
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004217void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004218 // Will be generated at use site.
4219}
4220
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004221void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004222 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004223 locations->SetOut(Location::ConstantLocation(constant));
4224}
4225
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004226void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004227 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004228}
4229
Calin Juravle175dc732015-08-25 15:42:32 +01004230void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4231 // The trampoline uses the same calling convention as dex calling conventions,
4232 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
4233 // the method_idx.
4234 HandleInvoke(invoke);
4235}
4236
4237void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4238 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004239 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle175dc732015-08-25 15:42:32 +01004240}
4241
Alexandre Rames5319def2014-10-23 10:03:10 +01004242void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01004243 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01004244 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01004245}
4246
Alexandre Rames67555f72014-11-18 10:55:16 +00004247void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4248 HandleInvoke(invoke);
4249}
4250
4251void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4252 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004253 LocationSummary* locations = invoke->GetLocations();
4254 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004255 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00004256 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004257 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00004258
4259 // The register ip1 is required to be used for the hidden argument in
4260 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004261 MacroAssembler* masm = GetVIXLAssembler();
4262 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004263 scratch_scope.Exclude(ip1);
4264 __ Mov(ip1, invoke->GetDexMethodIndex());
4265
Artem Serov914d7a82017-02-07 14:33:49 +00004266 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004267 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004268 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004269 {
4270 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4271 // /* HeapReference<Class> */ temp = temp->klass_
4272 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4273 codegen_->MaybeRecordImplicitNullCheck(invoke);
4274 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004275 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004276 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004277 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004278 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004279 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004280 }
Artem Serov914d7a82017-02-07 14:33:49 +00004281
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004282 // Instead of simply (possibly) unpoisoning `temp` here, we should
4283 // emit a read barrier for the previous class reference load.
4284 // However this is not required in practice, as this is an
4285 // intermediate/temporary reference and because the current
4286 // concurrent copying collector keeps the from-space memory
4287 // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004289 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004290 __ Ldr(temp,
4291 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4292 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004293 invoke->GetImtIndex(), kArm64PointerSize));
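  // The IMT has a fixed number of slots, so several interface methods may
  // hash to the same slot; a conflicting slot holds the conflict trampoline,
  // which uses the method index passed in ip1 to find the implementation.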
Alexandre Rames67555f72014-11-18 10:55:16 +00004294 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004295 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004296 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004297 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004298
4299 {
4300 // Ensure the pc position is recorded immediately after the `blr` instruction.
4301 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4302
4303 // lr();
4304 __ blr(lr);
4305 DCHECK(!codegen_->IsLeafMethod());
4306 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4307 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004308
4309 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00004310}
4311
4312void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004313 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004314 if (intrinsic.TryDispatch(invoke)) {
4315 return;
4316 }
4317
Alexandre Rames67555f72014-11-18 10:55:16 +00004318 HandleInvoke(invoke);
4319}
4320
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004321void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004322 // Explicit clinit checks triggered by static invokes must have been pruned by
4323 // art::PrepareForRegisterAllocation.
4324 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004325
Vladimir Markoca6fff82017-10-03 14:49:14 +01004326 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetAllocator(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004327 if (intrinsic.TryDispatch(invoke)) {
4328 return;
4329 }
4330
Alexandre Rames67555f72014-11-18 10:55:16 +00004331 HandleInvoke(invoke);
4332}
4333
Andreas Gampe878d58c2015-01-15 23:24:00 -08004334static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4335 if (invoke->GetLocations()->Intrinsified()) {
4336 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4337 intrinsic.Dispatch(invoke);
4338 return true;
4339 }
4340 return false;
4341}
4342
Vladimir Markodc151b22015-10-15 18:02:30 +01004343HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4344 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004345 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004346 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004347 return desired_dispatch_info;
4348}
4349
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004350void CodeGeneratorARM64::GenerateStaticOrDirectCall(
4351 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004352 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004353 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4354 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004355 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4356 uint32_t offset =
4357 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004358 // temp = thread->string_init_entrypoint
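      // (`tr` is the reserved thread register, x19 on ARM64.)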
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004359 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004360 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004361 }
Vladimir Marko58155012015-08-19 12:49:41 +00004362 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004363 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004364 break;
Vladimir Marko65979462017-05-19 17:25:12 +01004365 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
4366 DCHECK(GetCompilerOptions().IsBootImage());
4367 // Add ADRP with its PC-relative method patch.
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004368 vixl::aarch64::Label* adrp_label = NewBootImageMethodPatch(invoke->GetTargetMethod());
Vladimir Marko65979462017-05-19 17:25:12 +01004369 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4370 // Add ADD with its PC-relative method patch.
4371 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004372 NewBootImageMethodPatch(invoke->GetTargetMethod(), adrp_label);
Vladimir Marko65979462017-05-19 17:25:12 +01004373 EmitAddPlaceholder(add_label, XRegisterFrom(temp), XRegisterFrom(temp));
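      // Once linked, the two placeholders resolve to something like:
      //   adrp xT, <method>             // page of the method's address
      //   add  xT, xT, #:lo12:<method>  // low 12 bits of the address
      // (illustrative only; the immediates are filled in by the linker).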
4374 break;
4375 }
Vladimir Markob066d432018-01-03 13:14:37 +00004376 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
4377 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004378 uint32_t boot_image_offset = GetBootImageOffset(invoke);
Vladimir Markob066d432018-01-03 13:14:37 +00004379 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
4380 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
4381 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
4382 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
4383 // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
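      // The entry itself is filled at load time with the 32-bit boot image
      // address of the method.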
4384 EmitLdrOffsetPlaceholder(ldr_label, WRegisterFrom(temp), XRegisterFrom(temp));
4385 break;
4386 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004387 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
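      // The .bss slot referenced here starts out null; the runtime fills it
      // with the resolved ArtMethod* on first use.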
Vladimir Markob066d432018-01-03 13:14:37 +00004388 // Add ADRP with its PC-relative .bss entry patch.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004389 MethodReference target_method(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex());
4390 vixl::aarch64::Label* adrp_label = NewMethodBssEntryPatch(target_method);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004391 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Markob066d432018-01-03 13:14:37 +00004392 // Add LDR with its PC-relative .bss entry patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004393 vixl::aarch64::Label* ldr_label =
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004394 NewMethodBssEntryPatch(target_method, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004395 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004396 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004397 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004398 case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
4399 // Load method address from literal pool.
4400 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
4401 break;
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004402 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
4403 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
4404 return; // No code pointer retrieval; the runtime performs the call directly.
Vladimir Marko58155012015-08-19 12:49:41 +00004405 }
4406 }
4407
4408 switch (invoke->GetCodePtrLocation()) {
4409 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004410 {
4411 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
4412 ExactAssemblyScope eas(GetVIXLAssembler(),
4413 kInstructionSize,
4414 CodeBufferCheckScope::kExactSize);
4415 __ bl(&frame_entry_label_);
4416 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
4417 }
Vladimir Marko58155012015-08-19 12:49:41 +00004418 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004419 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4420 // LR = callee_method->entry_point_from_quick_compiled_code_;
4421 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004422 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004423 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004424 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004425 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004426 ExactAssemblyScope eas(GetVIXLAssembler(),
4427 kInstructionSize,
4428 CodeBufferCheckScope::kExactSize);
4429 // lr()
4430 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004431 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004432 }
Vladimir Marko58155012015-08-19 12:49:41 +00004433 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004434 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004435
Andreas Gampe878d58c2015-01-15 23:24:00 -08004436 DCHECK(!IsLeafMethod());
4437}
4438
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004439void CodeGeneratorARM64::GenerateVirtualCall(
4440 HInvokeVirtual* invoke, Location temp_in, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004441 // Use the calling convention instead of the location of the receiver, as
4442 // intrinsics may have put the receiver in a different register. In the intrinsics
4443 // slow path, the arguments have been moved to the right place, so here we are
4444 // guaranteed that the receiver is the first register of the calling convention.
4445 InvokeDexCallingConvention calling_convention;
4446 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004447 Register temp = XRegisterFrom(temp_in);
4448 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4449 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4450 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004451 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004452
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004453 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004454
4455 {
4456 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
4457 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4458 // /* HeapReference<Class> */ temp = receiver->klass_
4459 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4460 MaybeRecordImplicitNullCheck(invoke);
4461 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004462 // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
4465 // concurrent copying collector keeps the from-space memory
4466 // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004468 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4469 // temp = temp->GetMethodAt(method_offset);
4470 __ Ldr(temp, MemOperand(temp, method_offset));
4471 // lr = temp->GetEntryPoint();
4472 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004473 {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004474 // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
Artem Serov914d7a82017-02-07 14:33:49 +00004475 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4476 // lr();
4477 __ blr(lr);
Vladimir Markoe7197bf2017-06-02 17:00:23 +01004478 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00004479 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004480}
4481
Orion Hodsonac141392017-01-13 11:53:47 +00004482void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4483 HandleInvoke(invoke);
4484}
4485
4486void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4487 codegen_->GenerateInvokePolymorphicCall(invoke);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004488 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Orion Hodsonac141392017-01-13 11:53:47 +00004489}
4490
Orion Hodson4c8e12e2018-05-18 08:33:20 +01004491void LocationsBuilderARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4492 HandleInvoke(invoke);
4493}
4494
4495void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
4496 codegen_->GenerateInvokeCustomCall(invoke);
4497 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
4498}
4499
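// Each of the New*Patch() helpers below records one half of an ADRP + ADD/LDR
// pair: the first call (with a null `adrp_label`) creates the ADRP patch, and
// a second call passes that label back so that the linker can pair the
// low-12-bit fixup with the page address computed by the ADRP.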
Vladimir Marko6fd16062018-06-26 11:02:04 +01004500vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
4501 uint32_t intrinsic_data,
4502 vixl::aarch64::Label* adrp_label) {
4503 return NewPcRelativePatch(
4504 /* dex_file */ nullptr, intrinsic_data, adrp_label, &boot_image_intrinsic_patches_);
4505}
4506
Vladimir Markob066d432018-01-03 13:14:37 +00004507vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
4508 uint32_t boot_image_offset,
4509 vixl::aarch64::Label* adrp_label) {
4510 return NewPcRelativePatch(
4511 /* dex_file */ nullptr, boot_image_offset, adrp_label, &boot_image_method_patches_);
4512}
4513
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004514vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageMethodPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004515 MethodReference target_method,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004516 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004517 return NewPcRelativePatch(
4518 target_method.dex_file, target_method.index, adrp_label, &boot_image_method_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004519}
4520
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004521vixl::aarch64::Label* CodeGeneratorARM64::NewMethodBssEntryPatch(
4522 MethodReference target_method,
4523 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004524 return NewPcRelativePatch(
4525 target_method.dex_file, target_method.index, adrp_label, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004526}
4527
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004528vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageTypePatch(
Scott Wakeling97c72b72016-06-24 16:19:36 +01004529 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004530 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004531 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004532 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &boot_image_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004533}
4534
Vladimir Marko1998cd02017-01-13 13:02:58 +00004535vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4536 const DexFile& dex_file,
4537 dex::TypeIndex type_index,
4538 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004539 return NewPcRelativePatch(&dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004540}
4541
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004542vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageStringPatch(
Vladimir Marko65979462017-05-19 17:25:12 +01004543 const DexFile& dex_file,
4544 dex::StringIndex string_index,
4545 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004546 return NewPcRelativePatch(
4547 &dex_file, string_index.index_, adrp_label, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01004548}
4549
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004550vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
4551 const DexFile& dex_file,
4552 dex::StringIndex string_index,
4553 vixl::aarch64::Label* adrp_label) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004554 return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004555}
4556
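// `mr` is the marking register (w20 on ARM64): it is non-zero only while the
// concurrent GC is marking, so the CBNZ emitted here branches to the read
// barrier slow path only during a concurrent collection.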
Vladimir Marko966b46f2018-08-03 10:20:19 +00004557void CodeGeneratorARM64::EmitBakerReadBarrierCbnz(uint32_t custom_data) {
4558 ExactAssemblyScope guard(GetVIXLAssembler(), 1 * vixl::aarch64::kInstructionSize);
4559 if (Runtime::Current()->UseJitCompilation()) {
4560 auto it = jit_baker_read_barrier_slow_paths_.FindOrAdd(custom_data);
4561 vixl::aarch64::Label* slow_path_entry = &it->second.label;
4562 __ cbnz(mr, slow_path_entry);
4563 } else {
4564 baker_read_barrier_patches_.emplace_back(custom_data);
4565 vixl::aarch64::Label* cbnz_label = &baker_read_barrier_patches_.back().label;
4566 __ bind(cbnz_label);
4567 __ cbnz(mr, static_cast<int64_t>(0)); // Placeholder, patched at link-time.
4568 }
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004569}
4570
Scott Wakeling97c72b72016-06-24 16:19:36 +01004571vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004572 const DexFile* dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004573 uint32_t offset_or_index,
4574 vixl::aarch64::Label* adrp_label,
4575 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004576 // Add a patch entry and return the label.
4577 patches->emplace_back(dex_file, offset_or_index);
4578 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004579 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004580 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4581 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4582 return label;
4583}
4584
Scott Wakeling97c72b72016-06-24 16:19:36 +01004585vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4586 uint64_t address) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004587 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004588}
4589
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004590vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004591 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004592 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004593 return jit_string_patches_.GetOrCreate(
4594 StringReference(&dex_file, string_index),
4595 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4596}
4597
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004598vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004599 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01004600 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004601 return jit_class_patches_.GetOrCreate(
4602 TypeReference(&dex_file, type_index),
4603 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4604}
4605
Vladimir Markoaad75c62016-10-03 08:46:48 +00004606void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4607 vixl::aarch64::Register reg) {
4608 DCHECK(reg.IsX());
4609 SingleEmissionCheckScope guard(GetVIXLAssembler());
4610 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004611 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004612}
4613
4614void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4615 vixl::aarch64::Register out,
4616 vixl::aarch64::Register base) {
4617 DCHECK(out.IsX());
4618 DCHECK(base.IsX());
4619 SingleEmissionCheckScope guard(GetVIXLAssembler());
4620 __ Bind(fixup_label);
4621 __ add(out, base, Operand(/* offset placeholder */ 0));
4622}
4623
4624void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4625 vixl::aarch64::Register out,
4626 vixl::aarch64::Register base) {
4627 DCHECK(base.IsX());
4628 SingleEmissionCheckScope guard(GetVIXLAssembler());
4629 __ Bind(fixup_label);
4630 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4631}
4632
Vladimir Markoeebb8212018-06-05 14:57:24 +01004633void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
Vladimir Marko6fd16062018-06-26 11:02:04 +01004634 uint32_t boot_image_reference) {
4635 if (GetCompilerOptions().IsBootImage()) {
    // Add ADRP with its PC-relative intrinsic patch.
4637 vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
4638 EmitAdrpPlaceholder(adrp_label, reg.X());
    // Add ADD with its PC-relative intrinsic patch.
4640 vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
4641 EmitAddPlaceholder(add_label, reg.X(), reg.X());
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004642 } else if (Runtime::Current()->IsAotCompiler()) {
Vladimir Markoeebb8212018-06-05 14:57:24 +01004643 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004644 vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004645 EmitAdrpPlaceholder(adrp_label, reg.X());
4646 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6fd16062018-06-26 11:02:04 +01004647 vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
Vladimir Markoeebb8212018-06-05 14:57:24 +01004648 EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
4649 } else {
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004650 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markoeebb8212018-06-05 14:57:24 +01004651 gc::Heap* heap = Runtime::Current()->GetHeap();
4652 DCHECK(!heap->GetBootImageSpaces().empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004653 const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
Vladimir Markoeebb8212018-06-05 14:57:24 +01004654 __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
4655 }
4656}
4657
Vladimir Marko6fd16062018-06-26 11:02:04 +01004658void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
4659 uint32_t boot_image_offset) {
4660 DCHECK(invoke->IsStatic());
4661 InvokeRuntimeCallingConvention calling_convention;
4662 Register argument = calling_convention.GetRegisterAt(0);
4663 if (GetCompilerOptions().IsBootImage()) {
4664 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
4665 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
4666 MethodReference target_method = invoke->GetTargetMethod();
4667 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
4668 // Add ADRP with its PC-relative type patch.
4669 vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
4670 EmitAdrpPlaceholder(adrp_label, argument.X());
4671 // Add ADD with its PC-relative type patch.
4672 vixl::aarch64::Label* add_label =
4673 NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
4674 EmitAddPlaceholder(add_label, argument.X(), argument.X());
4675 } else {
4676 LoadBootImageAddress(argument, boot_image_offset);
4677 }
4678 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
  CheckEntrypointTypes<kQuickAllocObjectInitialized, void*, mirror::Class*>();
4680}
4681
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004682template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Vladimir Markoaad75c62016-10-03 08:46:48 +00004683inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4684 const ArenaDeque<PcRelativePatchInfo>& infos,
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004685 ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004686 for (const PcRelativePatchInfo& info : infos) {
4687 linker_patches->push_back(Factory(info.label.GetLocation(),
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004688 info.target_dex_file,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004689 info.pc_insn_label->GetLocation(),
4690 info.offset_or_index));
4691 }
4692}
4693
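// Adapts a three-argument patch factory (one that takes no dex file) to the
// four-argument shape expected by EmitPcRelativeLinkerPatches() above.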
Vladimir Marko6fd16062018-06-26 11:02:04 +01004694template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
4695linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
4696 const DexFile* target_dex_file,
4697 uint32_t pc_insn_offset,
4698 uint32_t boot_image_offset) {
4699 DCHECK(target_dex_file == nullptr); // Unused for these patches, should be null.
4700 return Factory(literal_offset, pc_insn_offset, boot_image_offset);
Vladimir Markob066d432018-01-03 13:14:37 +00004701}
4702
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004703void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
Vladimir Marko58155012015-08-19 12:49:41 +00004704 DCHECK(linker_patches->empty());
4705 size_t size =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004706 boot_image_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004707 method_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004708 boot_image_type_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004709 type_bss_entry_patches_.size() +
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004710 boot_image_string_patches_.size() +
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01004711 string_bss_entry_patches_.size() +
Vladimir Marko6fd16062018-06-26 11:02:04 +01004712 boot_image_intrinsic_patches_.size() +
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004713 baker_read_barrier_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004714 linker_patches->reserve(size);
Vladimir Marko65979462017-05-19 17:25:12 +01004715 if (GetCompilerOptions().IsBootImage()) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004716 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004717 boot_image_method_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004718 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004719 boot_image_type_patches_, linker_patches);
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004720 EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004721 boot_image_string_patches_, linker_patches);
Vladimir Marko6fd16062018-06-26 11:02:04 +01004722 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
4723 boot_image_intrinsic_patches_, linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01004724 } else {
Vladimir Marko6fd16062018-06-26 11:02:04 +01004725 EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
Vladimir Markob066d432018-01-03 13:14:37 +00004726 boot_image_method_patches_, linker_patches);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004727 DCHECK(boot_image_type_patches_.empty());
4728 DCHECK(boot_image_string_patches_.empty());
Vladimir Marko6fd16062018-06-26 11:02:04 +01004729 DCHECK(boot_image_intrinsic_patches_.empty());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004730 }
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004731 EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
4732 method_bss_entry_patches_, linker_patches);
4733 EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
4734 type_bss_entry_patches_, linker_patches);
4735 EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
4736 string_bss_entry_patches_, linker_patches);
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004737 for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
Vladimir Markod8dbc8d2017-09-20 13:37:47 +01004738 linker_patches->push_back(linker::LinkerPatch::BakerReadBarrierBranchPatch(
4739 info.label.GetLocation(), info.custom_data));
Vladimir Markof4f2daa2017-03-20 18:26:59 +00004740 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004741 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004742}
4743
Vladimir Markoca1e0382018-04-11 09:58:41 +00004744bool CodeGeneratorARM64::NeedsThunkCode(const linker::LinkerPatch& patch) const {
4745 return patch.GetType() == linker::LinkerPatch::Type::kBakerReadBarrierBranch ||
4746 patch.GetType() == linker::LinkerPatch::Type::kCallRelative;
4747}
4748
4749void CodeGeneratorARM64::EmitThunkCode(const linker::LinkerPatch& patch,
4750 /*out*/ ArenaVector<uint8_t>* code,
4751 /*out*/ std::string* debug_name) {
4752 Arm64Assembler assembler(GetGraph()->GetAllocator());
4753 switch (patch.GetType()) {
4754 case linker::LinkerPatch::Type::kCallRelative: {
4755 // The thunk just uses the entry point in the ArtMethod. This works even for calls
4756 // to the generic JNI and interpreter trampolines.
4757 Offset offset(ArtMethod::EntryPointFromQuickCompiledCodeOffset(
4758 kArm64PointerSize).Int32Value());
4759 assembler.JumpTo(ManagedRegister(arm64::X0), offset, ManagedRegister(arm64::IP0));
4760 if (GetCompilerOptions().GenerateAnyDebugInfo()) {
4761 *debug_name = "MethodCallThunk";
4762 }
4763 break;
4764 }
4765 case linker::LinkerPatch::Type::kBakerReadBarrierBranch: {
4766 DCHECK_EQ(patch.GetBakerCustomValue2(), 0u);
4767 CompileBakerReadBarrierThunk(assembler, patch.GetBakerCustomValue1(), debug_name);
4768 break;
4769 }
4770 default:
4771 LOG(FATAL) << "Unexpected patch type " << patch.GetType();
4772 UNREACHABLE();
4773 }
4774
4775 // Ensure we emit the literal pool if any.
4776 assembler.FinalizeCode();
4777 code->resize(assembler.CodeSize());
4778 MemoryRegion code_region(code->data(), code->size());
4779 assembler.FinalizeInstructions(code_region);
4780}
4781
Vladimir Marko0eb882b2017-05-15 13:39:18 +01004782vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value) {
4783 return uint32_literals_.GetOrCreate(
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004784 value,
4785 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4786}
4787
Scott Wakeling97c72b72016-06-24 16:19:36 +01004788vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004789 return uint64_literals_.GetOrCreate(
4790 value,
4791 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004792}
4793
Andreas Gampe878d58c2015-01-15 23:24:00 -08004794void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004795 // Explicit clinit checks triggered by static invokes must have been pruned by
4796 // art::PrepareForRegisterAllocation.
4797 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004798
Andreas Gampe878d58c2015-01-15 23:24:00 -08004799 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004800 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004801 return;
4802 }
4803
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004804 {
4805 // Ensure that no pools are emitted between the BLR (emitted by
4806 // GenerateStaticOrDirectCall) and RecordPcInfo.
4807 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4808 LocationSummary* locations = invoke->GetLocations();
4809 codegen_->GenerateStaticOrDirectCall(
4810 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
4811 }
4812
4813 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004814}
4815
4816void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004817 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004818 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004819 return;
4820 }
4821
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004822 {
4823 // Ensure that no pools are emitted between the BLR (emitted by
4824 // GenerateVirtualCall) and RecordPcInfo.
4825 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
4826 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
4827 DCHECK(!codegen_->IsLeafMethod());
4828 }
4829
4830 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01004831}
4832
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004833HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4834 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004835 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004836 case HLoadClass::LoadKind::kInvalid:
4837 LOG(FATAL) << "UNREACHABLE";
4838 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004839 case HLoadClass::LoadKind::kReferrersClass:
4840 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004841 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004842 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004843 case HLoadClass::LoadKind::kBssEntry:
4844 DCHECK(!Runtime::Current()->UseJitCompilation());
4845 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004846 case HLoadClass::LoadKind::kJitBootImageAddress:
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004847 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004848 DCHECK(Runtime::Current()->UseJitCompilation());
4849 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004850 case HLoadClass::LoadKind::kRuntimeCall:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004851 break;
4852 }
4853 return desired_class_load_kind;
4854}
4855
Alexandre Rames67555f72014-11-18 10:55:16 +00004856void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004857 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004858 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004859 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004860 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004861 cls,
4862 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004863 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00004864 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004865 return;
4866 }
Vladimir Marko41559982017-01-06 14:04:23 +00004867 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004868
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004869 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4870 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004871 ? LocationSummary::kCallOnSlowPath
4872 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01004873 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004874 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004875 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004876 }
4877
Vladimir Marko41559982017-01-06 14:04:23 +00004878 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004879 locations->SetInAt(0, Location::RequiresRegister());
4880 }
4881 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004882 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
4883 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4884 // Rely on the type resolution or initialization and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01004885 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004886 } else {
4887 // For non-Baker read barrier we have a temp-clobbering call.
4888 }
4889 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004890}
4891
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004892// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4893// move.
4894void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004895 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004896 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00004897 codegen_->GenerateLoadClassRuntimeCall(cls);
Roland Levillain2b03a1f2017-06-06 16:09:59 +01004898 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Calin Juravle580b6092015-10-06 17:35:58 +01004899 return;
4900 }
Vladimir Marko41559982017-01-06 14:04:23 +00004901 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004902
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004903 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004904 Register out = OutputRegister(cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00004905
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004906 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4907 ? kWithoutReadBarrier
4908 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004909 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004910 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004911 case HLoadClass::LoadKind::kReferrersClass: {
4912 DCHECK(!cls->CanCallRuntime());
4913 DCHECK(!cls->MustGenerateClinitCheck());
4914 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4915 Register current_method = InputRegisterAt(cls, 0);
Vladimir Markoca1e0382018-04-11 09:58:41 +00004916 codegen_->GenerateGcRootFieldLoad(cls,
4917 out_loc,
4918 current_method,
4919 ArtMethod::DeclaringClassOffset().Int32Value(),
4920 /* fixup_label */ nullptr,
4921 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004922 break;
4923 }
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004924 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004925 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004926 // Add ADRP with its PC-relative type patch.
4927 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004928 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004929 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004930 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004931 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004932 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00004933 codegen_->NewBootImageTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004934 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004935 break;
4936 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004937 case HLoadClass::LoadKind::kBootImageRelRo: {
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004938 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004939 uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
4940 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
4941 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004942 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004943 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004944 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004945 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004946 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
Vladimir Marko94ec2db2017-09-06 17:21:03 +01004947 break;
4948 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004949 case HLoadClass::LoadKind::kBssEntry: {
4950 // Add ADRP with its PC-relative Class .bss entry patch.
4951 const DexFile& dex_file = cls->GetDexFile();
4952 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markof3c52b42017-11-17 17:32:12 +00004953 vixl::aarch64::Register temp = XRegisterFrom(out_loc);
4954 vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
4955 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00004956 // Add LDR with its PC-relative Class .bss entry patch.
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004957 vixl::aarch64::Label* ldr_label =
Vladimir Markof3c52b42017-11-17 17:32:12 +00004958 codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004959 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00004960 codegen_->GenerateGcRootFieldLoad(cls,
4961 out_loc,
4962 temp,
4963 /* offset placeholder */ 0u,
4964 ldr_label,
4965 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004966 generate_null_check = true;
4967 break;
4968 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01004969 case HLoadClass::LoadKind::kJitBootImageAddress: {
4970 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
4971 uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
4972 DCHECK_NE(address, 0u);
4973 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
4974 break;
4975 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004976 case HLoadClass::LoadKind::kJitTableAddress: {
4977 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4978 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004979 cls->GetClass()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00004980 codegen_->GenerateGcRootFieldLoad(cls,
4981 out_loc,
4982 out.X(),
4983 /* offset */ 0,
4984 /* fixup_label */ nullptr,
4985 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004986 break;
4987 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01004988 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004989 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00004990 LOG(FATAL) << "UNREACHABLE";
4991 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004992 }
4993
Vladimir Markoea4c1262017-02-06 19:59:33 +00004994 bool do_clinit = cls->MustGenerateClinitCheck();
4995 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004996 DCHECK(cls->CanCallRuntime());
Vladimir Markoa9f303c2018-07-20 16:43:56 +01004997 SlowPathCodeARM64* slow_path =
4998 new (codegen_->GetScopedAllocator()) LoadClassSlowPathARM64(cls, cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004999 codegen_->AddSlowPath(slow_path);
5000 if (generate_null_check) {
5001 __ Cbz(out, slow_path->GetEntryLabel());
5002 }
5003 if (cls->MustGenerateClinitCheck()) {
5004 GenerateClassInitializationCheck(slow_path, out);
5005 } else {
5006 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00005007 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005008 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005009 }
5010}
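// All PC-relative cases above materialize an address with an ADRP pair whose
// operands are fixed up later via the recorded patches. Illustrative shapes of
// the patched sequences (symbolic operands, not exact encodings):
//
//   kBootImageLinkTimePcRelative:   adrp x0, Type              // 4KiB page of the type
//                                   add  x0, x0, :lo12:Type    // low 12 bits of the address
//   kBootImageRelRo and kBssEntry:  adrp x0, Entry             // page of the table entry
//                                   ldr  w0, [x0, :lo12:Entry] // load the GC root
//
// The kBssEntry load can yield null, hence the Cbz into the slow path.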
5011
Orion Hodsondbaa5c72018-05-10 08:22:46 +01005012void LocationsBuilderARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5013 InvokeRuntimeCallingConvention calling_convention;
5014 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5015 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, location, location);
5016}
5017
5018void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
5019 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
5020}
5021
Orion Hodson18259d72018-04-12 11:18:23 +01005022void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
5023 InvokeRuntimeCallingConvention calling_convention;
5024 Location location = LocationFrom(calling_convention.GetRegisterAt(0));
5025 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
5026}
5027
5028void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
5029 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
5030}
5031
David Brazdilcb1c0552015-08-04 16:22:25 +01005032static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07005033 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01005034}
5035
Alexandre Rames67555f72014-11-18 10:55:16 +00005036void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
5037 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005038 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexandre Rames67555f72014-11-18 10:55:16 +00005039 locations->SetOut(Location::RequiresRegister());
5040}
5041
5042void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01005043 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
5044}
5045
5046void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005047 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01005048}
5049
5050void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
5051 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00005052}
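// Both exception accesses go through the reserved thread register (tr).
// Illustrative forms of the emitted instructions (the offset is symbolic):
//
//   ldr w0,  [tr, #exception_offset]  // VisitLoadException: read Thread's exception field.
//   str wzr, [tr, #exception_offset]  // VisitClearException: wzr stores zero.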
5053
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005054HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
5055 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005056 switch (desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005057 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005058 case HLoadString::LoadKind::kBootImageRelRo:
Vladimir Markoaad75c62016-10-03 08:46:48 +00005059 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01005060 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005061 break;
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005062 case HLoadString::LoadKind::kJitBootImageAddress:
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005063 case HLoadString::LoadKind::kJitTableAddress:
5064 DCHECK(Runtime::Current()->UseJitCompilation());
5065 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005066 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005067 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005068 }
5069 return desired_string_load_kind;
5070}
5071
Alexandre Rames67555f72014-11-18 10:55:16 +00005072void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005073 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01005074 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005075 if (load->GetLoadKind() == HLoadString::LoadKind::kRuntimeCall) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005076 InvokeRuntimeCallingConvention calling_convention;
5077 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
5078 } else {
5079 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005080 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
5081 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00005082        // Rely on the pResolveString entrypoint and marking to save everything we need.
Vladimir Marko3232dbb2018-07-25 15:42:46 +01005083 locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005084 } else {
5085 // For non-Baker read barrier we have a temp-clobbering call.
5086 }
5087 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005088 }
Alexandre Rames67555f72014-11-18 10:55:16 +00005089}
5090
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005091// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
5092// move.
5093void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00005094 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005095 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005096
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005097 switch (load->GetLoadKind()) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005098 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005099 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005100 // Add ADRP with its PC-relative String patch.
5101 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005102 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005103 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005104 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005105 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005106 vixl::aarch64::Label* add_label =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00005107 codegen_->NewBootImageStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005108 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005109 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005110 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005111 case HLoadString::LoadKind::kBootImageRelRo: {
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005112 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005113 // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
5114 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
5115 vixl::aarch64::Label* adrp_label = codegen_->NewBootImageRelRoPatch(boot_image_offset);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005116 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005117 // Add LDR with its PC-relative .data.bimg.rel.ro patch.
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005118 vixl::aarch64::Label* ldr_label =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005119 codegen_->NewBootImageRelRoPatch(boot_image_offset, adrp_label);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005120 codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
5121 return;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005122 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00005123 case HLoadString::LoadKind::kBssEntry: {
5124 // Add ADRP with its PC-relative String .bss entry patch.
5125 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005126 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00005127 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markof3c52b42017-11-17 17:32:12 +00005128 Register temp = XRegisterFrom(out_loc);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005129 vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005130 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005131 // Add LDR with its PC-relative String .bss entry patch.
Vladimir Markoaad75c62016-10-03 08:46:48 +00005132 vixl::aarch64::Label* ldr_label =
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005133 codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005134 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoca1e0382018-04-11 09:58:41 +00005135 codegen_->GenerateGcRootFieldLoad(load,
5136 out_loc,
5137 temp,
5138 /* offset placeholder */ 0u,
5139 ldr_label,
5140 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005141 SlowPathCodeARM64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00005142 new (codegen_->GetScopedAllocator()) LoadStringSlowPathARM64(load);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005143 codegen_->AddSlowPath(slow_path);
5144 __ Cbz(out.X(), slow_path->GetEntryLabel());
5145 __ Bind(slow_path->GetExitLabel());
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005146 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Vladimir Markoaad75c62016-10-03 08:46:48 +00005147 return;
5148 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01005149 case HLoadString::LoadKind::kJitBootImageAddress: {
5150 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
5151 DCHECK_NE(address, 0u);
5152 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
5153 return;
5154 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005155 case HLoadString::LoadKind::kJitTableAddress: {
5156 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00005157 load->GetStringIndex(),
5158 load->GetString()));
Vladimir Markoca1e0382018-04-11 09:58:41 +00005159 codegen_->GenerateGcRootFieldLoad(load,
5160 out_loc,
5161 out.X(),
5162 /* offset */ 0,
5163 /* fixup_label */ nullptr,
5164 kCompilerReadBarrierOption);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005165 return;
5166 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005167 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005168 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005169 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005170
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07005171  // TODO: Re-add the compiler code to do string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005172 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01005173 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08005174 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07005175 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
5176 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005177 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005178}
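// For the runtime fallback above, the string index travels in the first
// runtime argument register and the resolved String comes back in w0. A rough
// sketch of the call (InvokeRuntime goes through the Thread's entrypoint
// table rather than a direct BL; names are symbolic):
//
//   mov w0, #string_index
//   ldr lr, [tr, #pResolveString_offset]
//   blr lr                              // Resolved String reference in w0.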
5179
Alexandre Rames5319def2014-10-23 10:03:10 +01005180void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005181 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01005182 locations->SetOut(Location::ConstantLocation(constant));
5183}
5184
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005185void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005186 // Will be generated at use site.
5187}
5188
Alexandre Rames67555f72014-11-18 10:55:16 +00005189void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005190 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5191 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005192 InvokeRuntimeCallingConvention calling_convention;
5193 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5194}
5195
5196void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01005197 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005198 instruction,
5199 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005200 if (instruction->IsEnter()) {
5201 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
5202 } else {
5203 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
5204 }
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005205 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames67555f72014-11-18 10:55:16 +00005206}
5207
Alexandre Rames42d641b2014-10-27 14:00:51 +00005208void LocationsBuilderARM64::VisitMul(HMul* mul) {
5209 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005210 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005211 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005212 case DataType::Type::kInt32:
5213 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005214 locations->SetInAt(0, Location::RequiresRegister());
5215 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005216 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005217 break;
5218
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005219 case DataType::Type::kFloat32:
5220 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005221 locations->SetInAt(0, Location::RequiresFpuRegister());
5222 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00005223 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00005224 break;
5225
5226 default:
5227 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5228 }
5229}
5230
5231void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
5232 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005233 case DataType::Type::kInt32:
5234 case DataType::Type::kInt64:
Alexandre Rames42d641b2014-10-27 14:00:51 +00005235 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
5236 break;
5237
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005238 case DataType::Type::kFloat32:
5239 case DataType::Type::kFloat64:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005240 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00005241 break;
5242
5243 default:
5244 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
5245 }
5246}
5247
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005248void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
5249 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005250 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005251 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005252 case DataType::Type::kInt32:
5253 case DataType::Type::kInt64:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00005254 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00005255 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005256 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005257
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005258 case DataType::Type::kFloat32:
5259 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005260 locations->SetInAt(0, Location::RequiresFpuRegister());
5261 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005262 break;
5263
5264 default:
5265 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5266 }
5267}
5268
5269void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
5270 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005271 case DataType::Type::kInt32:
5272 case DataType::Type::kInt64:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005273 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
5274 break;
5275
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005276 case DataType::Type::kFloat32:
5277 case DataType::Type::kFloat64:
Alexandre Rames67555f72014-11-18 10:55:16 +00005278 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005279 break;
5280
5281 default:
5282 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
5283 }
5284}
5285
5286void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005287 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5288 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005289 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005290 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005291 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5292 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005293}
5294
5295void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01005296  // Note: if heap poisoning is enabled, the entry point takes care
5297 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00005298 QuickEntrypointEnum entrypoint =
5299 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
5300 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00005301 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005302 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00005303}
5304
Alexandre Rames5319def2014-10-23 10:03:10 +01005305void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005306 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
5307 instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01005308 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07005309 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005310 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexandre Rames5319def2014-10-23 10:03:10 +01005311}
5312
5313void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07005314 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
5315 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Roland Levillain2b03a1f2017-06-06 16:09:59 +01005316 codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
Alexandre Rames5319def2014-10-23 10:03:10 +01005317}
5318
5319void LocationsBuilderARM64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005320 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00005321 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00005322 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01005323}
5324
5325void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00005326 switch (instruction->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005327 case DataType::Type::kInt32:
5328 case DataType::Type::kInt64:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005329 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005330 break;
5331
5332 default:
5333 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5334 }
5335}
5336
David Brazdil66d126e2015-04-03 16:02:44 +01005337void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005338 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
David Brazdil66d126e2015-04-03 16:02:44 +01005339 locations->SetInAt(0, Location::RequiresRegister());
5340 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5341}
5342
5343void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005344 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005345}
5346
Alexandre Rames5319def2014-10-23 10:03:10 +01005347void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005348 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5349 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005350}
5351
Calin Juravle2ae48182016-03-16 14:05:09 +00005352void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5353 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005354 return;
5355 }
Artem Serov914d7a82017-02-07 14:33:49 +00005356 {
5357    // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
5358 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5359 Location obj = instruction->GetLocations()->InAt(0);
5360 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5361 RecordPcInfo(instruction, instruction->GetDexPc());
5362 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005363}
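// The implicit check is just the wzr load above: dereferencing offset 0 of a
// null reference faults, and the fault handler uses the PC recorded by
// RecordPcInfo to map the signal back to this instruction and raise
// NullPointerException. Conceptually:
//
//   ldr wzr, [obj, #0]   // Faults iff obj == null; the loaded value is discarded.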
5364
Calin Juravle2ae48182016-03-16 14:05:09 +00005365void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01005366 SlowPathCodeARM64* slow_path = new (GetScopedAllocator()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005367 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005368
5369 LocationSummary* locations = instruction->GetLocations();
5370 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005371
5372 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005373}
5374
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005375void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005376 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005377}
5378
Alexandre Rames67555f72014-11-18 10:55:16 +00005379void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5380 HandleBinaryOp(instruction);
5381}
5382
5383void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5384 HandleBinaryOp(instruction);
5385}
5386
Alexandre Rames3e69f162014-12-10 10:36:50 +00005387void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5388 LOG(FATAL) << "Unreachable";
5389}
5390
5391void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01005392 if (instruction->GetNext()->IsSuspendCheck() &&
5393 instruction->GetBlock()->GetLoopInformation() != nullptr) {
5394 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
5395 // The back edge will generate the suspend check.
5396 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
5397 }
5398
Alexandre Rames3e69f162014-12-10 10:36:50 +00005399 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5400}
5401
Alexandre Rames5319def2014-10-23 10:03:10 +01005402void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005403 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005404 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5405 if (location.IsStackSlot()) {
5406 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5407 } else if (location.IsDoubleStackSlot()) {
5408 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5409 }
5410 locations->SetOut(location);
5411}
5412
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005413void InstructionCodeGeneratorARM64::VisitParameterValue(
5414 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005415 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005416}
5417
5418void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5419 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01005420 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005421 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005422}
5423
5424void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5425 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5426 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005427}
5428
5429void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005430 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005431 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005432 locations->SetInAt(i, Location::Any());
5433 }
5434 locations->SetOut(Location::Any());
5435}
5436
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005437void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005438 LOG(FATAL) << "Unreachable";
5439}
5440
Serban Constantinescu02164b32014-11-13 14:05:07 +00005441void LocationsBuilderARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005442 DataType::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005443 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005444 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005445 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01005446 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005447
5448 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005449 case DataType::Type::kInt32:
5450 case DataType::Type::kInt64:
Serban Constantinescu02164b32014-11-13 14:05:07 +00005451 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005452 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005453 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5454 break;
5455
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005456 case DataType::Type::kFloat32:
5457 case DataType::Type::kFloat64: {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005458 InvokeRuntimeCallingConvention calling_convention;
5459 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5460 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5461 locations->SetOut(calling_convention.GetReturnLocation(type));
5462
5463 break;
5464 }
5465
Serban Constantinescu02164b32014-11-13 14:05:07 +00005466 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005467 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005468 }
5469}
5470
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005471void InstructionCodeGeneratorARM64::GenerateIntRemForPower2Denom(HRem* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005472 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005473 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
5474 DCHECK(IsPowerOfTwo(abs_imm)) << abs_imm;
5475
5476 Register out = OutputRegister(instruction);
5477 Register dividend = InputRegisterAt(instruction, 0);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005478
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005479 if (abs_imm == 2) {
5480 __ Cmp(dividend, 0);
5481 __ And(out, dividend, 1);
5482 __ Csneg(out, out, out, ge);
5483 } else {
5484 UseScratchRegisterScope temps(GetVIXLAssembler());
5485 Register temp = temps.AcquireSameSizeAs(out);
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005486
Evgeny Astigeevicha3234e92018-06-19 23:26:15 +01005487 __ Negs(temp, dividend);
5488 __ And(out, dividend, abs_imm - 1);
5489 __ And(temp, temp, abs_imm - 1);
5490 __ Csneg(out, out, temp, mi);
5491 }
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005492}
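// Worked example for the general power-of-two path, with dividend = -10 and
// abs_imm = 8 (mask abs_imm - 1 = 7):
//
//   negs temp, dividend       // temp = 10; flags set from +10, so "mi" is clear.
//   and  out, dividend, #7    // out  = (-10) & 7 = 6 (two's complement).
//   and  temp, temp, #7       // temp = 10 & 7 = 2.
//   csneg out, out, temp, mi  // mi clear -> out = -temp = -2.
//
// -10 % 8 == -2, matching Java remainder semantics (sign follows the dividend).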
5493
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005494void InstructionCodeGeneratorARM64::GenerateIntRemForConstDenom(HRem* instruction) {
Evgeny Astigeevichf9e90542018-06-25 13:43:53 +01005495 int64_t imm = Int64FromLocation(instruction->GetLocations()->InAt(1));
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005496
5497 if (imm == 0) {
5498 // Do not generate anything.
5499    // DivZeroCheck would prevent any code from being executed.
5500 return;
5501 }
5502
Evgeny Astigeevichf58dc652018-06-25 17:54:07 +01005503 if (IsPowerOfTwo(AbsOrMin(imm))) {
5504 // Cases imm == -1 or imm == 1 are handled in constant folding by
5505 // InstructionWithAbsorbingInputSimplifier.
5506 // If the cases have survided till code generation they are handled in
5507 // GenerateIntRemForPower2Denom becauses -1 and 1 are the power of 2 (2^0).
5508 // The correct code is generated for them, just more instructions.
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005509 GenerateIntRemForPower2Denom(instruction);
5510 } else {
5511 DCHECK(imm < -2 || imm > 2) << imm;
5512 GenerateDivRemWithAnyConstant(instruction);
5513 }
5514}
5515
5516void InstructionCodeGeneratorARM64::GenerateIntRem(HRem* instruction) {
5517 DCHECK(DataType::IsIntOrLongType(instruction->GetResultType()))
5518 << instruction->GetResultType();
5519
5520 if (instruction->GetLocations()->InAt(1).IsConstant()) {
5521 GenerateIntRemForConstDenom(instruction);
5522 } else {
5523 Register out = OutputRegister(instruction);
5524 Register dividend = InputRegisterAt(instruction, 0);
5525 Register divisor = InputRegisterAt(instruction, 1);
5526 UseScratchRegisterScope temps(GetVIXLAssembler());
5527 Register temp = temps.AcquireSameSizeAs(out);
5528 __ Sdiv(temp, dividend, divisor);
5529 __ Msub(out, temp, divisor, dividend);
5530 }
5531}
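// For a non-constant divisor the remainder is derived from the truncating
// quotient, e.g. with dividend = -7 and divisor = 3:
//
//   sdiv temp, dividend, divisor       // temp = -7 / 3 = -2 (rounds toward zero).
//   msub out, temp, divisor, dividend  // out = -7 - (-2 * 3) = -1.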
5532
Serban Constantinescu02164b32014-11-13 14:05:07 +00005533void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005534 DataType::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005535
Serban Constantinescu02164b32014-11-13 14:05:07 +00005536 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005537 case DataType::Type::kInt32:
5538 case DataType::Type::kInt64: {
Evgeny Astigeevich878f17d2018-06-01 16:53:58 +01005539 GenerateIntRem(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005540 break;
5541 }
5542
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005543 case DataType::Type::kFloat32:
5544 case DataType::Type::kFloat64: {
5545 QuickEntrypointEnum entrypoint =
5546 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005547 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005548 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00005549 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5550 } else {
5551 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5552 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005553 break;
5554 }
5555
Serban Constantinescu02164b32014-11-13 14:05:07 +00005556 default:
5557 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005558 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005559 }
5560}
5561
Aart Bik1f8d51b2018-02-15 10:42:37 -08005562void LocationsBuilderARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005563 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005564}
5565
Aart Bik1f8d51b2018-02-15 10:42:37 -08005566void InstructionCodeGeneratorARM64::VisitMin(HMin* min) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005567 HandleBinaryOp(min);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005568}
5569
5570void LocationsBuilderARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005571 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005572}
5573
5574void InstructionCodeGeneratorARM64::VisitMax(HMax* max) {
Petre-Ionut Tudor2227fe42018-04-20 17:12:05 +01005575 HandleBinaryOp(max);
Aart Bik1f8d51b2018-02-15 10:42:37 -08005576}
5577
Aart Bik3dad3412018-02-28 12:01:46 -08005578void LocationsBuilderARM64::VisitAbs(HAbs* abs) {
5579 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
5580 switch (abs->GetResultType()) {
5581 case DataType::Type::kInt32:
5582 case DataType::Type::kInt64:
5583 locations->SetInAt(0, Location::RequiresRegister());
5584 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5585 break;
5586 case DataType::Type::kFloat32:
5587 case DataType::Type::kFloat64:
5588 locations->SetInAt(0, Location::RequiresFpuRegister());
5589 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5590 break;
5591 default:
5592 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5593 }
5594}
5595
5596void InstructionCodeGeneratorARM64::VisitAbs(HAbs* abs) {
5597 switch (abs->GetResultType()) {
5598 case DataType::Type::kInt32:
5599 case DataType::Type::kInt64: {
5600 Register in_reg = InputRegisterAt(abs, 0);
5601 Register out_reg = OutputRegister(abs);
5602 __ Cmp(in_reg, Operand(0));
5603 __ Cneg(out_reg, in_reg, lt);
5604 break;
5605 }
5606 case DataType::Type::kFloat32:
5607 case DataType::Type::kFloat64: {
5608 FPRegister in_reg = InputFPRegisterAt(abs, 0);
5609 FPRegister out_reg = OutputFPRegister(abs);
5610 __ Fabs(out_reg, in_reg);
5611 break;
5612 }
5613 default:
5614 LOG(FATAL) << "Unexpected type for abs operation " << abs->GetResultType();
5615 }
5616}
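// Integer abs compiles to a compare plus conditional negate:
//
//   cmp  in, #0
//   cneg out, in, lt   // out = (in < 0) ? -in : in
//
// e.g. in = -5 gives out = 5. As with Java's Math.abs, abs(INT_MIN) wraps back
// to INT_MIN since the negation overflows.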
5617
Igor Murashkind01745e2017-04-05 16:40:31 -07005618void LocationsBuilderARM64::VisitConstructorFence(HConstructorFence* constructor_fence) {
5619 constructor_fence->SetLocations(nullptr);
5620}
5621
5622void InstructionCodeGeneratorARM64::VisitConstructorFence(
5623 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
5624 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
5625}
5626
Calin Juravle27df7582015-04-17 19:12:31 +01005627void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5628 memory_barrier->SetLocations(nullptr);
5629}
5630
5631void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005632 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005633}
5634
Alexandre Rames5319def2014-10-23 10:03:10 +01005635void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005636 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01005637 DataType::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005638 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005639}
5640
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005641void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005642 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005643}
5644
5645void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5646 instruction->SetLocations(nullptr);
5647}
5648
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005649void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005650 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005651}
5652
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005653void LocationsBuilderARM64::VisitRor(HRor* ror) {
5654 HandleBinaryOp(ror);
5655}
5656
5657void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5658 HandleBinaryOp(ror);
5659}
5660
Serban Constantinescu02164b32014-11-13 14:05:07 +00005661void LocationsBuilderARM64::VisitShl(HShl* shl) {
5662 HandleShift(shl);
5663}
5664
5665void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5666 HandleShift(shl);
5667}
5668
5669void LocationsBuilderARM64::VisitShr(HShr* shr) {
5670 HandleShift(shr);
5671}
5672
5673void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5674 HandleShift(shr);
5675}
5676
void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionARM64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In the suspend check slow path there are usually no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores the lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
  codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(conversion, LocationSummary::kNoCall);
  DataType::Type input_type = conversion->GetInputType();
  DataType::Type result_type = conversion->GetResultType();
  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;
  if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
      (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  if (DataType::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (DataType::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    int result_size = DataType::Size(result_type);
    int input_size = DataType::Size(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
    if (result_type == DataType::Type::kInt32 && input_type == DataType::Type::kInt64) {
      // 'int' values are used directly as W registers, discarding the top
      // bits, so we don't need to sign-extend and can just perform a move.
      // We do not pass the `kDiscardForSameWReg` argument to force clearing the
      // top 32 bits of the target register. We theoretically could leave those
      // bits unchanged, but we would have to make sure that no code uses a
      // 32bit input value as a 64bit value assuming that the top 32 bits are
      // zero.
      __ Mov(output.W(), source.W());
    } else if (DataType::IsUnsignedType(result_type) ||
               (DataType::IsUnsignedType(input_type) && input_size < result_size)) {
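      // The result is unsigned, or we are widening an unsigned input:
      // zero-extend the low `result_size` bytes into the output.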
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, result_size * kBitsPerByte);
    } else {
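      // All remaining integral conversions: sign-extend the low `min_size` bytes.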
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
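    // Note: FCVTZS rounds toward zero and saturates on overflow, with NaN
    // converting to 0, which matches Java's FP-to-integral cast semantics.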
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  Register value_reg = InputRegisterAt(switch_instr, 0);
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Roughly set 16 as the maximum average number of instructions generated per HIR in a graph.
  static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
  // ADR has a limited range (+/- 1 MB), so we set a threshold for the number of HIRs in the
  // graph to make sure we don't emit it if the target may be out of range.
  // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
  // ranges and emit the tables only as required.
  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;

  if (num_entries <= kPackedSwitchCompareJumpThreshold ||
      // Current instruction id is an upper bound of the number of HIRs in the graph.
      GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
    // Create a series of compare/jumps.
    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
    Register temp = temps.AcquireW();
    __ Subs(temp, value_reg, Operand(lower_bound));

    const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
    // Jump to successors[0] if value == lower_bound.
    __ B(eq, codegen_->GetLabelOf(successors[0]));
    int32_t last_index = 0;
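    // Handle the remaining cases two at a time: each iteration re-biases `temp`
    // by 2 and uses the `lo` and `eq` conditions to select between the next two
    // case targets.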
    for (; num_entries - last_index > 2; last_index += 2) {
      __ Subs(temp, temp, Operand(2));
      // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
      __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
      // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
    }
    if (num_entries - last_index == 2) {
      // The last missing case_value.
      __ Cmp(temp, Operand(1));
      __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
    }

    // And the default for any other value.
    if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
      __ B(codegen_->GetLabelOf(default_block));
    }
  } else {
    JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);

    UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());

    // The instructions below need at most one extra scratch register at a time.
    // Since the scratch pool holds two, we are free to reserve one here.
    Register temp_w = temps.AcquireW();
    Register index;
    // Remove the bias.
    if (lower_bound != 0) {
      index = temp_w;
      __ Sub(index, value_reg, Operand(lower_bound));
    } else {
      index = value_reg;
    }

    // Jump to the default block if the index is out of range.
    __ Cmp(index, Operand(num_entries));
    __ B(hs, codegen_->GetLabelOf(default_block));

    // In the current VIXL implementation, encoding the immediate value for Adr requires
    // no scratch registers, so we are free to use both VIXL scratch registers to reduce
    // register pressure.
    Register table_base = temps.AcquireX();
    // Load the jump offset from the table.
    __ Adr(table_base, jump_table->GetTableStartLabel());
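    // Each table entry is a 32-bit offset from the table start, so the index is
    // scaled by 4 (UXTW #2) here and the loaded offset is sign-extended below.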
    Register jump_offset = temp_w;
    __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));

    // Jump to the target block by branching to table_base (PC-relative) + offset.
    Register target_address = table_base;
    __ Add(target_address, table_base, Operand(jump_offset, SXTW));
    __ Br(target_address);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  DataType::Type type = DataType::Type::kReference;
  Register out_reg = RegisterFrom(out, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      Register temp_reg = RegisterFrom(maybe_temp, type);
      __ Mov(temp_reg, out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ Ldr(out_reg, HeapOperand(out_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ Ldr(out_reg, HeapOperand(out_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  DataType::Type type = DataType::Type::kReference;
  Register out_reg = RegisterFrom(out, type);
  Register obj_reg = RegisterFrom(obj, type);
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false,
                                                      /* use_load_acquire */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ Ldr(out_reg, HeapOperand(obj_reg, offset));
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ Ldr(out_reg, HeapOperand(obj_reg, offset));
    GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
  }
}

void CodeGeneratorARM64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    Register obj,
    uint32_t offset,
    vixl::aarch64::Label* fixup_label,
    ReadBarrierOption read_barrier_option) {
  DCHECK(fixup_label == nullptr || offset == 0u);
  Register root_reg = RegisterFrom(root, DataType::Type::kReference);
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used.

      // Query `art::Thread::Current()->GetIsGcMarking()` (stored in
      // the Marking Register) to decide whether we need to enter
      // the slow path to mark the GC root.
      //
      // We use shared thunks for the slow path; shared within the method
      // for JIT, across methods for AOT. That thunk checks the reference
      // and jumps to the entrypoint if needed.
      //
      //     lr = &return_address;
      //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
      //     if (mr) {  // Thread::Current()->GetIsGcMarking()
      //       goto gc_root_thunk<root_reg>(lr)
      //     }
      //   return_address:

      UseScratchRegisterScope temps(GetVIXLAssembler());
      DCHECK(temps.IsAvailable(ip0));
      DCHECK(temps.IsAvailable(ip1));
      temps.Exclude(ip0, ip1);
      uint32_t custom_data = EncodeBakerReadBarrierGcRootData(root_reg.GetCode());

      ExactAssemblyScope guard(GetVIXLAssembler(), 3 * vixl::aarch64::kInstructionSize);
      vixl::aarch64::Label return_address;
      __ adr(lr, &return_address);
      if (fixup_label != nullptr) {
        __ bind(fixup_label);
      }
      static_assert(BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_OFFSET == -8,
                    "GC root LDR must be 2 instructions (8B) before the return address label.");
      __ ldr(root_reg, MemOperand(obj.X(), offset));
      EmitBakerReadBarrierCbnz(custom_data);
      __ bind(&return_address);
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      if (fixup_label == nullptr) {
        __ Add(root_reg.X(), obj.X(), offset);
      } else {
        EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
      }
      // /* mirror::Object* */ root = root->Read()
      GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    if (fixup_label == nullptr) {
      __ Ldr(root_reg, MemOperand(obj, offset));
    } else {
      EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
    }
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               vixl::aarch64::Register obj,
                                                               const vixl::aarch64::MemOperand& src,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to mark the reference. Then, in the slow path, check the
  // gray bit in the lock word of the reference's holder (`obj`) to
  // decide whether to mark `ref` or not.
  //
  // We use shared thunks for the slow path; shared within the method
  // for JIT, across methods for AOT. That thunk checks the holder
  // and jumps to the entrypoint if needed. If the holder is not gray,
  // it creates a fake dependency and returns to the LDR instruction.
  //
  //     lr = &gray_return_address;
  //     if (mr) {  // Thread::Current()->GetIsGcMarking()
  //       goto field_thunk<holder_reg, base_reg, use_load_acquire>(lr)
  //     }
  //   not_gray_return_address:
  //     // Original reference load. If the offset is too large to fit
  //     // into LDR, we use an adjusted base register here.
  //     HeapReference<mirror::Object> reference = *(obj+offset);
  //   gray_return_address:

  DCHECK(src.GetAddrMode() == vixl::aarch64::Offset);
  DCHECK_ALIGNED(src.GetOffset(), sizeof(mirror::HeapReference<mirror::Object>));

  UseScratchRegisterScope temps(GetVIXLAssembler());
  DCHECK(temps.IsAvailable(ip0));
  DCHECK(temps.IsAvailable(ip1));
  temps.Exclude(ip0, ip1);
  uint32_t custom_data = use_load_acquire
      ? EncodeBakerReadBarrierAcquireData(src.GetBaseRegister().GetCode(), obj.GetCode())
      : EncodeBakerReadBarrierFieldData(src.GetBaseRegister().GetCode(), obj.GetCode());

  {
    ExactAssemblyScope guard(GetVIXLAssembler(),
                             (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
    vixl::aarch64::Label return_address;
    __ adr(lr, &return_address);
    EmitBakerReadBarrierCbnz(custom_data);
    static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                  "Field LDR must be 1 instruction (4B) before the return address label; "
                  "2 instructions (8B) for heap poisoning.");
    Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);
    if (use_load_acquire) {
      DCHECK_EQ(src.GetOffset(), 0);
      __ ldar(ref_reg, src);
    } else {
      __ ldr(ref_reg, src);
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
    // macro instructions disallowed in ExactAssemblyScope.
    if (kPoisonHeapReferences) {
      __ neg(ref_reg, Operand(ref_reg));
    }
    __ bind(&return_address);
  }
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
}

void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                               Location ref,
                                                               Register obj,
                                                               uint32_t offset,
                                                               Location maybe_temp,
                                                               bool needs_null_check,
                                                               bool use_load_acquire) {
  DCHECK_ALIGNED(offset, sizeof(mirror::HeapReference<mirror::Object>));
  Register base = obj;
  if (use_load_acquire) {
    DCHECK(maybe_temp.IsRegister());
    base = WRegisterFrom(maybe_temp);
    __ Add(base, obj, offset);
    offset = 0u;
  } else if (offset >= kReferenceLoadMinFarOffset) {
    DCHECK(maybe_temp.IsRegister());
    base = WRegisterFrom(maybe_temp);
    static_assert(IsPowerOfTwo(kReferenceLoadMinFarOffset), "Expecting a power of 2.");
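    // Fold the aligned upper part of the offset into the base register so that
    // the remainder fits in the immediate offset field of the LDR below.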
    __ Add(base, obj, Operand(offset & ~(kReferenceLoadMinFarOffset - 1u)));
    offset &= (kReferenceLoadMinFarOffset - 1u);
  }
  MemOperand src(base.X(), offset);
  GenerateFieldLoadWithBakerReadBarrier(
      instruction, ref, obj, src, needs_null_check, use_load_acquire);
}

void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(Location ref,
                                                               Register obj,
                                                               uint32_t data_offset,
                                                               Location index,
                                                               Register temp,
                                                               bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  size_t scale_factor = DataType::SizeShift(DataType::Type::kReference);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to mark the reference. Then, in the slow path, check the
  // gray bit in the lock word of the reference's holder (`obj`) to
  // decide whether to mark `ref` or not.
  //
  // We use shared thunks for the slow path; shared within the method
  // for JIT, across methods for AOT. That thunk checks the holder
  // and jumps to the entrypoint if needed. If the holder is not gray,
  // it creates a fake dependency and returns to the LDR instruction.
  //
  //     lr = &gray_return_address;
  //     if (mr) {  // Thread::Current()->GetIsGcMarking()
  //       goto array_thunk<base_reg>(lr)
  //     }
  //   not_gray_return_address:
  //     // Original reference load. If the offset is too large to fit
  //     // into LDR, we use an adjusted base register here.
  //     HeapReference<mirror::Object> reference = data[index];
  //   gray_return_address:

  DCHECK(index.IsValid());
  Register index_reg = RegisterFrom(index, DataType::Type::kInt32);
  Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);

  UseScratchRegisterScope temps(GetVIXLAssembler());
  DCHECK(temps.IsAvailable(ip0));
  DCHECK(temps.IsAvailable(ip1));
  temps.Exclude(ip0, ip1);
  uint32_t custom_data = EncodeBakerReadBarrierArrayData(temp.GetCode());

  __ Add(temp.X(), obj.X(), Operand(data_offset));
  {
    ExactAssemblyScope guard(GetVIXLAssembler(),
                             (kPoisonHeapReferences ? 4u : 3u) * vixl::aarch64::kInstructionSize);
    vixl::aarch64::Label return_address;
    __ adr(lr, &return_address);
    EmitBakerReadBarrierCbnz(custom_data);
    static_assert(BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                  "Array LDR must be 1 instruction (4B) before the return address label; "
                  "2 instructions (8B) for heap poisoning.");
    __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));
    DCHECK(!needs_null_check);  // The thunk cannot handle the null check.
    // Unpoison the reference explicitly if needed. MaybeUnpoisonHeapReference() uses
    // macro instructions disallowed in ExactAssemblyScope.
    if (kPoisonHeapReferences) {
      __ neg(ref_reg, Operand(ref_reg));
    }
    __ bind(&return_address);
  }
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__, /* temp_loc */ LocationFrom(ip1));
}

void CodeGeneratorARM64::UpdateReferenceFieldWithBakerReadBarrier(HInstruction* instruction,
                                                                  Location ref,
                                                                  Register obj,
                                                                  Location field_offset,
                                                                  Register temp,
                                                                  bool needs_null_check,
                                                                  bool use_load_acquire) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);
  // If we are emitting an array load, we should not be using a
  // Load Acquire instruction. In other words:
  // `instruction->IsArrayGet()` => `!use_load_acquire`.
  DCHECK(!instruction->IsArrayGet() || !use_load_acquire);

  // Query `art::Thread::Current()->GetIsGcMarking()` (stored in the
  // Marking Register) to decide whether we need to enter the slow
  // path to update the reference field within `obj`. Then, in the
  // slow path, check the gray bit in the lock word of the reference's
  // holder (`obj`) to decide whether to mark `ref` and update the
  // field or not.
  //
  //   if (mr) {  // Thread::Current()->GetIsGcMarking()
  //     // Slow path.
  //     uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //     lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //     HeapReference<mirror::Object> ref = *(obj + field_offset);  // Reference load.
  //     bool is_gray = (rb_state == ReadBarrier::GrayState());
  //     if (is_gray) {
  //       old_ref = ref;
  //       entrypoint = Thread::Current()->pReadBarrierMarkReg ## root.reg()
  //       ref = entrypoint(ref);  // ref = ReadBarrier::Mark(ref);  // Runtime entry point call.
  //       compareAndSwapObject(obj, field_offset, old_ref, ref);
  //     }
  //   }

  // Slow path updating the object reference at address `obj + field_offset`
  // when the GC is marking. The entrypoint will be loaded by the slow path code.
  SlowPathCodeARM64* slow_path =
      new (GetScopedAllocator()) LoadReferenceWithBakerReadBarrierAndUpdateFieldSlowPathARM64(
          instruction,
          ref,
          obj,
          /* offset */ 0u,
          /* index */ field_offset,
          /* scale_factor */ 0u /* "times 1" */,
          needs_null_check,
          use_load_acquire,
          temp);
  AddSlowPath(slow_path);

  __ Cbnz(mr, slow_path->GetEntryLabel());
  // Fast path: the GC is not marking: nothing to do (the field is
  // up-to-date, and we don't need to load the reference).
  __ Bind(slow_path->GetExitLabel());
  MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
}

void CodeGeneratorARM64::GenerateRawReferenceLoad(HInstruction* instruction,
                                                  Location ref,
                                                  Register obj,
                                                  uint32_t offset,
                                                  Location index,
                                                  size_t scale_factor,
                                                  bool needs_null_check,
                                                  bool use_load_acquire) {
  DCHECK(obj.IsW());
  DataType::Type type = DataType::Type::kReference;
  Register ref_reg = RegisterFrom(ref, type);

  // If needed, vixl::EmissionCheckScope guards are used to ensure
  // that no pools are emitted between the load (macro) instruction
  // and MaybeRecordImplicitNullCheck.

  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    if (use_load_acquire) {
      // UnsafeGetObjectVolatile intrinsic case.
      // Register `index` is not an index in an object array, but an
      // offset to an object reference field within object `obj`.
      DCHECK(instruction->IsInvoke()) << instruction->DebugName();
      DCHECK(instruction->GetLocations()->Intrinsified());
      DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
          << instruction->AsInvoke()->GetIntrinsic();
      DCHECK_EQ(offset, 0u);
      DCHECK_EQ(scale_factor, 0u);
      DCHECK_EQ(needs_null_check, false);
      // /* HeapReference<mirror::Object> */ ref = *(obj + index)
      MemOperand field = HeapOperand(obj, XRegisterFrom(index));
      LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
    } else {
      // ArrayGet and UnsafeGetObject and UnsafeCASObject intrinsics cases.
      // /* HeapReference<mirror::Object> */ ref = *(obj + offset + (index << scale_factor))
      if (index.IsConstant()) {
        uint32_t computed_offset = offset + (Int64FromLocation(index) << scale_factor);
        EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
        Load(type, ref_reg, HeapOperand(obj, computed_offset));
        if (needs_null_check) {
          MaybeRecordImplicitNullCheck(instruction);
        }
      } else {
        UseScratchRegisterScope temps(GetVIXLAssembler());
        Register temp = temps.AcquireW();
        __ Add(temp, obj, offset);
        {
          EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
          Load(type, ref_reg, HeapOperand(temp, XRegisterFrom(index), LSL, scale_factor));
          if (needs_null_check) {
            MaybeRecordImplicitNullCheck(instruction);
          }
        }
      }
    }
  } else {
    // /* HeapReference<mirror::Object> */ ref = *(obj + offset)
    MemOperand field = HeapOperand(obj, offset);
    if (use_load_acquire) {
      // Implicit null checks are handled by CodeGeneratorARM64::LoadAcquire.
      LoadAcquire(instruction, ref_reg, field, needs_null_check);
    } else {
      EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
      Load(type, ref_reg, field);
      if (needs_null_check) {
        MaybeRecordImplicitNullCheck(instruction);
      }
    }
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
}

void CodeGeneratorARM64::MaybeGenerateMarkingRegisterCheck(int code, Location temp_loc) {
  // The following condition is a compile-time one, so it does not have a run-time cost.
  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier && kIsDebugBuild) {
    // The following condition is a run-time one; it is executed after the
    // previous compile-time test, to avoid penalizing non-debug builds.
    if (GetCompilerOptions().EmitRunTimeChecksInDebugMode()) {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register temp = temp_loc.IsValid() ? WRegisterFrom(temp_loc) : temps.AcquireW();
      GetAssembler()->GenerateMarkingRegisterCheck(temp, code);
    }
  }
}

void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                 Location out,
                                                 Location ref,
                                                 Location obj,
                                                 uint32_t offset,
                                                 Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeARM64* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location ref,
                                                      Location obj,
                                                      uint32_t offset,
                                                      Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
  }
}

void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                        Location out,
                                                        Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeARM64* slow_path =
      new (GetScopedAllocator()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
  AddSlowPath(slow_path);

  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kArm64PointerSize).SizeValue();
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
  } else {
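    // IMT case: load the IMT pointer from the class, then the method from the IMT.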
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kArm64PointerSize));
    __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
        mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
    __ Ldr(XRegisterFrom(locations->Out()),
           MemOperand(XRegisterFrom(locations->Out()), method_offset));
  }
}

static void PatchJitRootUse(uint8_t* code,
                            const uint8_t* roots_data,
                            vixl::aarch64::Literal<uint32_t>* literal,
                            uint64_t index_in_table) {
  uint32_t literal_offset = literal->GetOffset();
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint8_t* data = code + literal_offset;
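  // Patch the 32-bit literal in place with the address of the root's slot in the
  // JIT roots table.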
  reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const StringReference& string_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitStringRootIndex(string_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
  for (const auto& entry : jit_class_patches_) {
    const TypeReference& type_reference = entry.first;
    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
    uint64_t index_in_table = GetJitClassRootIndex(type_reference);
    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
  }
}

#undef __
#undef QUICK_ENTRY_POINT

#define __ assembler.GetVIXLAssembler()->

static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,
                                     vixl::aarch64::Register base_reg,
                                     vixl::aarch64::MemOperand& lock_word,
                                     vixl::aarch64::Label* slow_path,
                                     vixl::aarch64::Label* throw_npe = nullptr) {
  // Load the lock word containing the rb_state.
  __ Ldr(ip0.W(), lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tbnz(ip0.W(), LockWord::kReadBarrierStateShift, slow_path);
  static_assert(
      BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET == BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET,
      "Field and array LDR offsets must be the same to reuse the same code.");
  // To throw an NPE, we return to the fast path; the artificial dependency below does not matter.
  if (throw_npe != nullptr) {
    __ Bind(throw_npe);
  }
  // Adjust the return address back to the LDR (1 instruction; 2 for heap poisoning).
  static_assert(BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET == (kPoisonHeapReferences ? -8 : -4),
                "Field LDR must be 1 instruction (4B) before the return address label; "
                "2 instructions (8B) for heap poisoning.");
  __ Add(lr, lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
  __ Add(base_reg, base_reg, Operand(ip0, LSR, 32));
  __ Br(lr);  // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}

// Load the read barrier introspection entrypoint in register `entrypoint`.
static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,
                                                       vixl::aarch64::Register entrypoint) {
  // entrypoint = Thread::Current()->pReadBarrierMarkReg16, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip0.GetCode(), 16u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ip0.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
}

void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,
                                                      uint32_t encoded_data,
                                                      /*out*/ std::string* debug_name) {
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField:
    case BakerReadBarrierKind::kAcquire: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      auto holder_reg =
          Register::GetXRegFromCode(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      // If base_reg differs from holder_reg, the offset was too large and we must have emitted
      // an explicit null check before the load. Otherwise, for implicit null checks, we need to
      // null-check the holder as we do not necessarily do that check before going to the thunk.
      vixl::aarch64::Label throw_npe_label;
      vixl::aarch64::Label* throw_npe = nullptr;
      if (GetCompilerOptions().GetImplicitNullChecks() && holder_reg.Is(base_reg)) {
        throw_npe = &throw_npe_label;
        __ Cbz(holder_reg.W(), throw_npe);
      }
      // Check if the holder is gray and, if not, add a fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint that performs further checks on the
      // reference and marks it if needed.
      vixl::aarch64::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, throw_npe);
      __ Bind(&slow_path);
      if (kind == BakerReadBarrierKind::kField) {
        MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_FIELD_LDR_OFFSET);
        __ Ldr(ip0.W(), ldr_address);         // Load the LDR (immediate) unsigned offset.
        LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
        __ Ubfx(ip0.W(), ip0.W(), 10, 12);    // Extract the offset.
        __ Ldr(ip0.W(), MemOperand(base_reg, ip0, LSL, 2));  // Load the reference.
      } else {
        DCHECK(kind == BakerReadBarrierKind::kAcquire);
        DCHECK(!base_reg.Is(holder_reg));
        LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
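        // The offset was folded into the base register when the load was emitted,
        // so the acquire load here uses an offset of zero.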
        __ Ldar(ip0.W(), MemOperand(base_reg));
      }
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Br(ip1);  // Jump to the entrypoint.
      break;
    }
    case BakerReadBarrierKind::kArray: {
      auto base_reg =
          Register::GetXRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
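      // The base register points at the array data, so the lock word in the
      // object header is at a negative offset from it.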
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffset(), 0);
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path);
      __ Bind(&slow_path);
      MemOperand ldr_address(lr, BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET);
      __ Ldr(ip0.W(), ldr_address);  // Load the LDR (register) unsigned offset.
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      __ Ubfx(ip0, ip0, 16, 6);      // Extract the index register, plus 32 (bit 21 is set).
      __ Bfi(ip1, ip0, 3, 6);        // Insert ip0 to the entrypoint address to create
                                     // a switch case target based on the index register.
      __ Mov(ip0, base_reg);         // Move the base register to ip0.
      __ Br(ip1);                    // Jump to the entrypoint's array switch case.
      break;
    }
    case BakerReadBarrierKind::kGcRoot: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      auto root_reg =
          Register::GetWRegFromCode(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                BakerReadBarrierSecondRegField::Decode(encoded_data));
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip0, ip1);
      vixl::aarch64::Label return_label, not_marked, forwarding_address;
      __ Cbz(root_reg, &return_label);
      MemOperand lock_word(root_reg.X(), mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip0.W(), lock_word);
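      // If the mark bit is set, the reference has already been marked; return it as is.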
      __ Tbz(ip0.W(), LockWord::kMarkBitStateShift, &not_marked);
      __ Bind(&return_label);
      __ Br(lr);
      __ Bind(&not_marked);
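      // Check for the forwarding address state, in which the two most significant
      // bits of the lock word are both set: `ip0 & (ip0 << 1)` is negative (`mi`)
      // exactly when bits 31 and 30 are both set.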
      __ Tst(ip0.W(), Operand(ip0.W(), LSL, 1));
      __ B(&forwarding_address, mi);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ip1);
      // Adjust the art_quick_read_barrier_mark_introspection address in IP1 to
      // art_quick_read_barrier_mark_introspection_gc_roots.
      __ Add(ip1, ip1, Operand(BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRYPOINT_OFFSET));
      __ Mov(ip0.W(), root_reg);
      __ Br(ip1);
      __ Bind(&forwarding_address);
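      // The lock word holds the forwarding address shifted right; shift it back
      // to recover the object address (the state bits drop off the top).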
      __ Lsl(root_reg, ip0.W(), LockWord::kForwardingAddressShift);
      __ Br(lr);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }

  // For JIT, the slow path is considered part of the compiled method,
  // so JIT should pass null as `debug_name`. Tests may not have a runtime.
  DCHECK(Runtime::Current() == nullptr ||
         !Runtime::Current()->UseJitCompilation() ||
         debug_name == nullptr);
  if (debug_name != nullptr && GetCompilerOptions().GenerateAnyDebugInfo()) {
    std::ostringstream oss;
    oss << "BakerReadBarrierThunk";
    switch (kind) {
      case BakerReadBarrierKind::kField:
        oss << "Field_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kAcquire:
        oss << "Acquire_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
            << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
        break;
      case BakerReadBarrierKind::kArray:
        oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
      case BakerReadBarrierKind::kGcRoot:
        oss << "GcRoot_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
        DCHECK_EQ(kBakerReadBarrierInvalidEncodedReg,
                  BakerReadBarrierSecondRegField::Decode(encoded_data));
        break;
    }
    *debug_name = oss.str();
  }
}

#undef __

}  // namespace arm64
}  // namespace art