/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_arm64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)
using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;
using vixl::EmissionCheckScope;

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif

namespace art {

template<class MirrorType>
class GcRoot;

namespace arm64 {

using helpers::ARM64EncodableConstantOrRegister;
using helpers::ArtVixlRegCodeCoherentForRegSet;
using helpers::CPURegisterFrom;
using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::HeapOperandFrom;
using helpers::InputCPURegisterAt;
using helpers::InputCPURegisterOrZeroRegAt;
using helpers::InputFPRegisterAt;
using helpers::InputOperandAt;
using helpers::InputRegisterAt;
using helpers::Int64ConstantFrom;
using helpers::IsConstantZeroBitPattern;
using helpers::LocationFrom;
using helpers::OperandFromMemOperand;
using helpers::OutputCPURegister;
using helpers::OutputFPRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::StackOperandFrom;
using helpers::VIXLRegCodeFromART;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

static constexpr int kCurrentMethodStackOffset = 0;
// The compare/jump sequence will generate about (1.5 * num_entries + 3) instructions, while the
// jump table version generates 7 instructions and num_entries literals. The compare/jump
// sequence therefore generates less code/data for a small num_entries.
static constexpr uint32_t kPackedSwitchCompareJumpThreshold = 7;

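// Map an ART HIR integer condition to the corresponding ARM64 condition code.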
inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    case kCondB:  return lo;
    case kCondBE: return ls;
    case kCondA:  return hi;
    case kCondAE: return hs;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

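// Map an ART HIR floating-point condition to an ARM64 condition code, taking the
// gt_bias of the comparison into account for unordered operands.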
inline Condition ARM64FPCondition(IfCondition cond, bool gt_bias) {
  // The ARM64 condition codes can express all the necessary branches; see the
  // "Meaning (floating-point)" column in table C1-1 of the ARMv8 reference manual.
  // There is no dex instruction or HIR that would need the missing conditions
  // "equal or unordered" or "not equal".
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne /* unordered */;
    case kCondLT: return gt_bias ? cc : lt /* unordered */;
    case kCondLE: return gt_bias ? ls : le /* unordered */;
    case kCondGT: return gt_bias ? hi /* unordered */ : gt;
    case kCondGE: return gt_bias ? cs /* unordered */ : ge;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

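// Return the Location in which a value of the given type is returned,
// following the ARM64 calling convention (w0/x0 for integral types, s0/d0 for
// floating-point types).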
Location ARM64ReturnLocation(Primitive::Type return_type) {
  // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
  // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
  // but we use the exact registers for clarity.
  if (return_type == Primitive::kPrimFloat) {
    return LocationFrom(s0);
  } else if (return_type == Primitive::kPrimDouble) {
    return LocationFrom(d0);
  } else if (return_type == Primitive::kPrimLong) {
    return LocationFrom(x0);
  } else if (return_type == Primitive::kPrimVoid) {
    return Location::NoLocation();
  } else {
    return LocationFrom(w0);
  }
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  return ARM64ReturnLocation(return_type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, x).Int32Value()

// Save or restore live registers, calculating the memory operands so that the
// load/store-pair offsets fit in the instructions' immediate fields.
static void SaveRestoreLiveRegistersHelper(CodeGenerator* codegen,
                                           LocationSummary* locations,
                                           int64_t spill_offset,
                                           bool is_save) {
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spills,
                                         codegen->GetNumberOfCoreRegisters(),
                                         fp_spills,
                                         codegen->GetNumberOfFloatingPointRegisters()));

  CPURegList core_list = CPURegList(CPURegister::kRegister, kXRegSize, core_spills);
  CPURegList fp_list = CPURegList(CPURegister::kFPRegister, kDRegSize, fp_spills);

  MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler();
  UseScratchRegisterScope temps(masm);

  Register base = masm->StackPointer();
  int64_t core_spill_size = core_list.GetTotalSizeInBytes();
  int64_t fp_spill_size = fp_list.GetTotalSizeInBytes();
  int64_t reg_size = kXRegSizeInBytes;
  int64_t max_ls_pair_offset = spill_offset + core_spill_size + fp_spill_size - 2 * reg_size;
  uint32_t ls_access_size = WhichPowerOf2(reg_size);
  if (((core_list.GetCount() > 1) || (fp_list.GetCount() > 1)) &&
      !masm->IsImmLSPair(max_ls_pair_offset, ls_access_size)) {
    // If the offset does not fit in the instruction's immediate field, use an alternate register
    // to compute the base address (the floating-point registers' spill base address).
    Register new_base = temps.AcquireSameSizeAs(base);
    __ Add(new_base, base, Operand(spill_offset + core_spill_size));
    base = new_base;
    spill_offset = -core_spill_size;
    int64_t new_max_ls_pair_offset = fp_spill_size - 2 * reg_size;
    DCHECK(masm->IsImmLSPair(spill_offset, ls_access_size));
    DCHECK(masm->IsImmLSPair(new_max_ls_pair_offset, ls_access_size));
  }

  if (is_save) {
    __ StoreCPURegList(core_list, MemOperand(base, spill_offset));
    __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  } else {
    __ LoadCPURegList(core_list, MemOperand(base, spill_offset));
    __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size));
  }
}

void SlowPathCodeARM64::SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  size_t stack_offset = codegen->GetFirstRegisterSlotInSlowPath();
  const uint32_t core_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ true);
  for (uint32_t i : LowToHighBits(core_spills)) {
    // If the register holds an object, update the stack mask.
    if (locations->RegisterContainsObject(i)) {
      locations->SetStackBit(stack_offset / kVRegSize);
    }
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_core_stack_offsets_[i] = stack_offset;
    stack_offset += kXRegSizeInBytes;
  }

  const uint32_t fp_spills = codegen->GetSlowPathSpills(locations, /* core_registers */ false);
  for (uint32_t i : LowToHighBits(fp_spills)) {
    DCHECK_LT(stack_offset, codegen->GetFrameSize() - codegen->FrameEntrySpillSize());
    DCHECK_LT(i, kMaximumNumberOfExpectedRegisters);
    saved_fpu_stack_offsets_[i] = stack_offset;
    stack_offset += kDRegSizeInBytes;
  }

  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), true /* is_save */);
}

void SlowPathCodeARM64::RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) {
  SaveRestoreLiveRegistersHelper(codegen,
                                 locations,
                                 codegen->GetFirstRegisterSlotInSlowPath(), false /* is_save */);
}

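// Slow path for HBoundsCheck: moves the index and length into the runtime calling
// convention registers and throws via the ThrowStringBounds or ThrowArrayBounds entrypoint.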
class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        locations->InAt(0), LocationFrom(calling_convention.GetRegisterAt(0)), Primitive::kPrimInt,
        locations->InAt(1), LocationFrom(calling_convention.GetRegisterAt(1)), Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

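// Slow path for HDivZeroCheck: throws an ArithmeticException via the ThrowDivZero entrypoint.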
class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
};

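// Slow path for HLoadClass/HClinitCheck: resolves (and optionally initializes) the class at
// runtime and, for the kBssEntry load kind, stores the resolved class back to its .bss entry.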
class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadClassSlowPathARM64(HLoadClass* cls,
                         HInstruction* at,
                         uint32_t dex_pc,
                         bool do_clinit,
                         vixl::aarch64::Register bss_entry_temp = vixl::aarch64::Register(),
                         vixl::aarch64::Label* bss_entry_adrp_label = nullptr)
      : SlowPathCodeARM64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_entry_temp_(bss_entry_temp),
        bss_entry_adrp_label_(bss_entry_adrp_label) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    constexpr bool call_saves_everything_except_r0_ip0 = (!kUseReadBarrier || kUseBakerReadBarrier);
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the page address of
    // the entry which is in a scratch register. Make sure it's not used for saving/restoring
    // registers. Exclude the scratch register also for non-Baker read barrier for simplicity.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    if (is_load_class_bss_entry) {
      // This temp is a scratch register.
      DCHECK(bss_entry_temp_.IsValid());
      temps.Exclude(bss_entry_temp_);
    }

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }
    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    if (is_load_class_bss_entry) {
      DCHECK(out.IsValid());
      const DexFile& dex_file = cls_->GetDexFile();
      if (call_saves_everything_except_r0_ip0) {
        // The class entry page address was preserved in bss_entry_temp_ thanks to kSaveEverything.
      } else {
        // For non-Baker read barrier, we need to re-calculate the address of the class entry page.
        bss_entry_adrp_label_ = arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
        arm64_codegen->EmitAdrpPlaceholder(bss_entry_adrp_label_, bss_entry_temp_);
      }
      vixl::aarch64::Label* strp_label =
          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label_);
      {
        SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
        __ Bind(strp_label);
        __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
               MemOperand(bss_entry_temp_, /* offset placeholder */ 0));
      }
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathARM64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of the instruction this slow path is attached to.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // For HLoadClass/kBssEntry, the temp register and the label of the ADRP where it was loaded.
  vixl::aarch64::Register bss_entry_temp_;
  vixl::aarch64::Label* bss_entry_adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
};

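// Slow path for HLoadString: resolves the string at runtime and stores the resolved
// String to its .bss entry so that later loads take the fast path.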
class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  LoadStringSlowPathARM64(HLoadString* instruction, Register temp, vixl::aarch64::Label* adrp_label)
      : SlowPathCodeARM64(instruction),
        temp_(temp),
        adrp_label_(adrp_label) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);

    // temp_ is a scratch register. Make sure it's not used for saving/restoring registers.
    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
    temps.Exclude(temp_);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
    arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile();
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // The string entry page address was preserved in temp_ thanks to kSaveEverything.
    } else {
      // For non-Baker read barrier, we need to re-calculate the address of the string entry page.
      adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
      arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
    }
    vixl::aarch64::Label* strp_label =
        arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
    {
      SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
      __ Bind(strp_label);
      __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
             MemOperand(temp_, /* offset placeholder */ 0));
    }

    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; }

 private:
  const Register temp_;
  vixl::aarch64::Label* adrp_label_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
};

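// Slow path for HNullCheck: throws a NullPointerException via the ThrowNullPointer entrypoint.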
class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    arm64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

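// Slow path for HSuspendCheck: calls the TestSuspend entrypoint, then branches back either
// to the return label or to the given successor block.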
class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  SuspendCheckSlowPathARM64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeARM64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(arm64_codegen->GetLabelOf(successor_));
    }
  }

  vixl::aarch64::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathARM64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::aarch64::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

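// Slow path for HInstanceOf/HCheckCast: calls the InstanceofNonTrivial or CheckInstanceOf
// entrypoint. Live registers are only saved/restored when the type check is non-fatal.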
class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  TypeCheckSlowPathARM64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeARM64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    uint32_t dex_pc = instruction_->GetDexPc();

    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               LocationFrom(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               LocationFrom(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      arm64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathARM64"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
};

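// Slow path for HDeoptimize: transfers control to the interpreter via the Deoptimize
// entrypoint.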
class DeoptimizationSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit DeoptimizationSlowPathARM64(HDeoptimize* instruction)
      : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    arm64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathARM64);
};

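// Slow path for HArraySet stores that need a runtime call: moves the array, index and value
// into the runtime calling convention registers and calls the AputObject entrypoint.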
class ArraySetSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit ArraySetSlowPathARM64(HInstruction* instruction) : SlowPathCodeARM64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        LocationFrom(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        LocationFrom(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        LocationFrom(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    arm64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathARM64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathARM64);
};

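// Emit the jump table for a packed switch: one 32-bit PC-relative offset literal per switch
// target, placed at `table_start_`.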
void JumpTableARM64::EmitTable(CodeGeneratorARM64* codegen) {
  uint32_t num_entries = switch_instr_->GetNumEntries();
  DCHECK_GE(num_entries, kPackedSwitchCompareJumpThreshold);

  // We are about to use the assembler to place literals directly. Make sure we have enough
  // space in the underlying code buffer and that the generated jump table has the right size.
  EmissionCheckScope scope(codegen->GetVIXLAssembler(),
                           num_entries * sizeof(int32_t),
                           CodeBufferCheckScope::kExactSize);

  __ Bind(&table_start_);
  const ArenaVector<HBasicBlock*>& successors = switch_instr_->GetBlock()->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    vixl::aarch64::Label* target_label = codegen->GetLabelOf(successors[i]);
    DCHECK(target_label->IsBound());
    ptrdiff_t jump_offset = target_label->GetLocation() - table_start_.GetLocation();
    DCHECK_GT(jump_offset, std::numeric_limits<int32_t>::min());
    DCHECK_LE(jump_offset, std::numeric_limits<int32_t>::max());
    Literal<int32_t> literal(jump_offset);
    __ place(&literal);
  }
}

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathARM64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location, it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkSlowPathARM64(HInstruction* instruction,
                               Location ref,
                               Location entrypoint = Location::NoLocation())
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathARM64"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();
    // The read barrier instrumentation of object ArrayGet
    // instructions does not support the HIntermediateAddress
    // instruction.
    DCHECK(!(instruction_->IsArrayGet() &&
             instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary; it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      arm64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      __ Blr(XRegisterFrom(entrypoint_));
    } else {
      // Entrypoint is not already loaded, load from the thread.
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
      // This runtime call does not require a stack map.
      arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if it is already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathARM64);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathARM64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathARM64(HInstruction* instruction,
                                             Location ref,
                                             Register obj,
                                             Location field_offset,
                                             Register temp)
      : SlowPathCodeARM64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathARM64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = WRegisterFrom(ref_);
    DCHECK(locations->CanCall());
    DCHECK(ref_.IsRegister()) << ref_;
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_.reg())) << ref_.reg();
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use IP to save the old reference, as IP is
    // used internally by the ReadBarrierMarkRegX entry point, and we
    // need the old reference after the call to that entry point.
    DCHECK_NE(LocationFrom(temp_).reg(), IP0);
    __ Mov(temp_.W(), ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
    DCHECK_NE(ref_.reg(), LR);
    DCHECK_NE(ref_.reg(), WSP);
    DCHECK_NE(ref_.reg(), WZR);
    // IP0 is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary; it cannot be the entry point's input/output.
    DCHECK_NE(ref_.reg(), IP0);
    DCHECK(0 <= ref_.reg() && ref_.reg() < kNumberOfWRegisters) << ref_.reg();
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in W0):
    //
    //   W0 <- ref
    //   W0 <- ReadBarrierMark(W0)
    //   ref <- W0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(ref_.reg());
    // This runtime call does not require a stack map.
    arm64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LDXR/CMP/BNE sequence of instructions in the compare-and-set
    // (CAS) operation below would abort the CAS, leaving the field
    // as-is.
    vixl::aarch64::Label done;
    __ Cmp(temp_.W(), ref_reg);
    __ B(eq, &done);

    // Update the holder's field atomically. This may fail if a
    // mutator updates it before us, but that's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    MacroAssembler* masm = arm64_codegen->GetVIXLAssembler();
    UseScratchRegisterScope temps(masm);

    // Convenience aliases.
    Register base = obj_.W();
    Register offset = XRegisterFrom(field_offset_);
    Register expected = temp_.W();
    Register value = ref_reg;
    Register tmp_ptr = temps.AcquireX();    // Pointer to actual memory.
    Register tmp_value = temps.AcquireW();  // Value in memory.

    __ Add(tmp_ptr, base.X(), Operand(offset));

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->PoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not poison `value`, as it is the same register as
        // `expected`, which has just been poisoned.
      } else {
        arm64_codegen->GetAssembler()->PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp_value = [tmp_ptr] - expected;
    // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));

    vixl::aarch64::Label loop_head, comparison_failed, exit_loop;
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&comparison_failed, ne);
    __ Stxr(tmp_value, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_value, &loop_head);
    __ B(&exit_loop);
    __ Bind(&comparison_failed);
    __ Clrex();
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected);
      if (value.Is(expected)) {
        // Do not unpoison `value`, as it is the same register as
        // `expected`, which has just been unpoisoned.
      } else {
        arm64_codegen->GetAssembler()->UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathARM64);
};

Roland Levillain22ccc3a2015-11-24 13:10:05 +0000902// Slow path generating a read barrier for a heap reference.
903class ReadBarrierForHeapReferenceSlowPathARM64 : public SlowPathCodeARM64 {
904 public:
905 ReadBarrierForHeapReferenceSlowPathARM64(HInstruction* instruction,
906 Location out,
907 Location ref,
908 Location obj,
909 uint32_t offset,
910 Location index)
David Srbecky9cd6d372016-02-09 15:24:47 +0000911 : SlowPathCodeARM64(instruction),
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000912 out_(out),
913 ref_(ref),
914 obj_(obj),
915 offset_(offset),
916 index_(index) {
917 DCHECK(kEmitCompilerReadBarrier);
918 // If `obj` is equal to `out` or `ref`, it means the initial object
919 // has been overwritten by (or after) the heap object reference load
920 // to be instrumented, e.g.:
921 //
922 // __ Ldr(out, HeapOperand(out, class_offset);
Roland Levillain44015862016-01-22 11:47:17 +0000923 // codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000924 //
925 // In that case, we have lost the information about the original
926 // object, and the emitted read barrier cannot work properly.
927 DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
928 DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
929 }
930
931 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
932 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
933 LocationSummary* locations = instruction_->GetLocations();
934 Primitive::Type type = Primitive::kPrimNot;
935 DCHECK(locations->CanCall());
936 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain3d312422016-06-23 13:53:42 +0100937 DCHECK(instruction_->IsInstanceFieldGet() ||
938 instruction_->IsStaticFieldGet() ||
939 instruction_->IsArrayGet() ||
940 instruction_->IsInstanceOf() ||
941 instruction_->IsCheckCast() ||
Roland Levillaindec8f632016-07-22 17:10:06 +0100942 (instruction_->IsInvokeVirtual()) && instruction_->GetLocations()->Intrinsified())
Roland Levillain44015862016-01-22 11:47:17 +0000943 << "Unexpected instruction in read barrier for heap reference slow path: "
944 << instruction_->DebugName();
Roland Levillain19c54192016-11-04 13:44:09 +0000945 // The read barrier instrumentation of object ArrayGet
946 // instructions does not support the HIntermediateAddress
947 // instruction.
Roland Levillaincd3d0fb2016-01-15 19:26:48 +0000948 DCHECK(!(instruction_->IsArrayGet() &&
Artem Serov328429f2016-07-06 16:23:04 +0100949 instruction_->AsArrayGet()->GetArray()->IsIntermediateAddress()));
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000950
951 __ Bind(GetEntryLabel());
952
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000953 SaveLiveRegisters(codegen, locations);
954
955 // We may have to change the index's value, but as `index_` is a
956 // constant member (like other "inputs" of this slow path),
957 // introduce a copy of it, `index`.
958 Location index = index_;
959 if (index_.IsValid()) {
Roland Levillain3d312422016-06-23 13:53:42 +0100960 // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
Roland Levillain22ccc3a2015-11-24 13:10:05 +0000961 if (instruction_->IsArrayGet()) {
962 // Compute the actual memory offset and store it in `index`.
963 Register index_reg = RegisterFrom(index_, Primitive::kPrimInt);
964 DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_.reg()));
965 if (codegen->IsCoreCalleeSaveRegister(index_.reg())) {
966 // We are about to change the value of `index_reg` (see the
967 // calls to vixl::MacroAssembler::Lsl and
968 // vixl::MacroAssembler::Mov below), but it has
969 // not been saved by the previous call to
970 // art::SlowPathCode::SaveLiveRegisters, as it is a
971 // callee-save register --
972 // art::SlowPathCode::SaveLiveRegisters does not consider
973 // callee-save registers, as it has been designed with the
974 // assumption that callee-save registers are supposed to be
975 // handled by the called function. So, as a callee-save
976 // register, `index_reg` _would_ eventually be saved onto
977 // the stack, but it would be too late: we would have
978 // changed its value earlier. Therefore, we manually save
979 // it here into another freely available register,
980 // `free_reg`, chosen of course among the caller-save
981 // registers (as a callee-save `free_reg` register would
982 // exhibit the same problem).
983 //
984 // Note we could have requested a temporary register from
985 // the register allocator instead; but we prefer not to, as
986 // this is a slow path, and we know we can find a
987 // caller-save register that is available.
988 Register free_reg = FindAvailableCallerSaveRegister(codegen);
989 __ Mov(free_reg.W(), index_reg);
990 index_reg = free_reg;
991 index = LocationFrom(index_reg);
992 } else {
993 // The initial register stored in `index_` has already been
994 // saved in the call to art::SlowPathCode::SaveLiveRegisters
995 // (as it is not a callee-save register), so we can freely
996 // use it.
997 }
998 // Shifting the index value contained in `index_reg` by the scale
999 // factor (2) cannot overflow in practice, as the runtime is
1000 // unable to allocate object arrays with a size larger than
1001 // 2^26 - 1 (that is, 2^28 - 4 bytes).
1002 __ Lsl(index_reg, index_reg, Primitive::ComponentSizeShift(type));
1003 static_assert(
1004 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
1005 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
1006 __ Add(index_reg, index_reg, Operand(offset_));
1007 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001008 // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
1009 // intrinsics, `index_` is not shifted by a scale factor of 2
1010 // (as in the case of ArrayGet), as it is actually an offset
1011 // to an object field within an object.
1012 DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001013 DCHECK(instruction_->GetLocations()->Intrinsified());
1014 DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
1015 (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
1016 << instruction_->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01001017 DCHECK_EQ(offset_, 0u);
Roland Levillaina7426c62016-08-03 15:02:10 +01001018 DCHECK(index_.IsRegister());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001019 }
1020 }
1021
1022 // We're moving two or three locations to locations that could
1023 // overlap, so we need a parallel move resolver.
1024 InvokeRuntimeCallingConvention calling_convention;
1025 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
1026 parallel_move.AddMove(ref_,
1027 LocationFrom(calling_convention.GetRegisterAt(0)),
1028 type,
1029 nullptr);
1030 parallel_move.AddMove(obj_,
1031 LocationFrom(calling_convention.GetRegisterAt(1)),
1032 type,
1033 nullptr);
1034 if (index.IsValid()) {
1035 parallel_move.AddMove(index,
1036 LocationFrom(calling_convention.GetRegisterAt(2)),
1037 Primitive::kPrimInt,
1038 nullptr);
1039 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1040 } else {
1041 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
1042 arm64_codegen->MoveConstant(LocationFrom(calling_convention.GetRegisterAt(2)), offset_);
1043 }
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001044 arm64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001045 instruction_,
1046 instruction_->GetDexPc(),
1047 this);
1048 CheckEntrypointTypes<
1049 kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
1050 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1051
1052 RestoreLiveRegisters(codegen, locations);
1053
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001054 __ B(GetExitLabel());
1055 }
1056
1057 const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathARM64"; }
1058
1059 private:
1060 Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001061 size_t ref = static_cast<int>(XRegisterFrom(ref_).GetCode());
1062 size_t obj = static_cast<int>(XRegisterFrom(obj_).GetCode());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001063 for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
1064 if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
1065 return Register(VIXLRegCodeFromART(i), kXRegSize);
1066 }
1067 }
1068 // We shall never fail to find a free caller-save register, as
1069 // there are more than two core caller-save registers on ARM64
1070 // (meaning it is possible to find one which is different from
1071 // `ref` and `obj`).
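  // (For instance, the eight argument registers x0-x7 are all caller-save
  // under AAPCS64, so excluding `ref` and `obj` still leaves at least six
  // candidates.)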
1072 DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
1073 LOG(FATAL) << "Could not find a free register";
1074 UNREACHABLE();
1075 }
1076
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001077 const Location out_;
1078 const Location ref_;
1079 const Location obj_;
1080 const uint32_t offset_;
1081 // An additional location containing an index to an array.
1082 // Only used for HArrayGet and the UnsafeGetObject &
1083 // UnsafeGetObjectVolatile intrinsics.
1084 const Location index_;
1085
1086 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathARM64);
1087};
1088
1089// Slow path generating a read barrier for a GC root.
1090class ReadBarrierForRootSlowPathARM64 : public SlowPathCodeARM64 {
1091 public:
1092 ReadBarrierForRootSlowPathARM64(HInstruction* instruction, Location out, Location root)
David Srbecky9cd6d372016-02-09 15:24:47 +00001093 : SlowPathCodeARM64(instruction), out_(out), root_(root) {
Roland Levillain44015862016-01-22 11:47:17 +00001094 DCHECK(kEmitCompilerReadBarrier);
1095 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001096
1097 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
1098 LocationSummary* locations = instruction_->GetLocations();
1099 Primitive::Type type = Primitive::kPrimNot;
1100 DCHECK(locations->CanCall());
1101 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(out_.reg()));
Roland Levillain44015862016-01-22 11:47:17 +00001102 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
1103 << "Unexpected instruction in read barrier for GC root slow path: "
1104 << instruction_->DebugName();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001105
1106 __ Bind(GetEntryLabel());
1107 SaveLiveRegisters(codegen, locations);
1108
1109 InvokeRuntimeCallingConvention calling_convention;
1110 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
1111 // The argument of the ReadBarrierForRootSlow entry point is not a managed
1112 // reference (`mirror::Object*`), but a `GcRoot<mirror::Object>*`;
1113 // thus we need a 64-bit move here, and we cannot use
1114 //
1115 // arm64_codegen->MoveLocation(
1116 // LocationFrom(calling_convention.GetRegisterAt(0)),
1117 // root_,
1118 // type);
1119 //
1120 // which would emit a 32-bit move, as `type` is a (32-bit wide)
1121 // reference type (`Primitive::kPrimNot`).
1122 __ Mov(calling_convention.GetRegisterAt(0), XRegisterFrom(out_));
Serban Constantinescu22f81d32016-02-18 16:06:31 +00001123 arm64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001124 instruction_,
1125 instruction_->GetDexPc(),
1126 this);
1127 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
1128 arm64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
1129
1130 RestoreLiveRegisters(codegen, locations);
1131 __ B(GetExitLabel());
1132 }
1133
1134 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathARM64"; }
1135
1136 private:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001137 const Location out_;
1138 const Location root_;
1139
1140 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathARM64);
1141};
1142
Alexandre Rames5319def2014-10-23 10:03:10 +01001143#undef __
1144
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001145Location InvokeDexCallingConventionVisitorARM64::GetNextLocation(Primitive::Type type) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001146 Location next_location;
1147 if (type == Primitive::kPrimVoid) {
1148 LOG(FATAL) << "Unreachable type " << type;
1149 }
1150
Alexandre Rames542361f2015-01-29 16:57:31 +00001151 if (Primitive::IsFloatingPointType(type) &&
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001152 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
1153 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(float_index_++));
Alexandre Rames542361f2015-01-29 16:57:31 +00001154 } else if (!Primitive::IsFloatingPointType(type) &&
1155 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001156 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
1157 } else {
1158 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Alexandre Rames542361f2015-01-29 16:57:31 +00001159 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
1160 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +01001161 }
1162
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001163 // Space on the stack is reserved for all arguments.
Alexandre Rames542361f2015-01-29 16:57:31 +00001164 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +01001165 return next_location;
1166}
1167
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001168Location InvokeDexCallingConventionVisitorARM64::GetMethodLocation() const {
Nicolas Geoffray38207af2015-06-01 15:46:22 +01001169 return LocationFrom(kArtMethodRegister);
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001170}
1171
Serban Constantinescu579885a2015-02-22 20:51:33 +00001172CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
1173 const Arm64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +01001174 const CompilerOptions& compiler_options,
1175 OptimizingCompilerStats* stats)
Alexandre Rames5319def2014-10-23 10:03:10 +01001176 : CodeGenerator(graph,
1177 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001178 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +00001179 kNumberOfAllocatableRegisterPairs,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001180 callee_saved_core_registers.GetList(),
1181 callee_saved_fp_registers.GetList(),
Serban Constantinescuecc43662015-08-13 13:33:12 +01001182 compiler_options,
1183 stats),
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001184 block_labels_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Zheng Xu3927c8b2015-11-18 17:46:25 +08001185 jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Alexandre Rames5319def2014-10-23 10:03:10 +01001186 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +00001187 instruction_visitor_(graph, this),
Serban Constantinescu579885a2015-02-22 20:51:33 +00001188 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +01001189 assembler_(graph->GetArena()),
Vladimir Marko58155012015-08-19 12:49:41 +00001190 isa_features_(isa_features),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001191 uint32_literals_(std::less<uint32_t>(),
1192 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko5233f932015-09-29 19:01:15 +01001193 uint64_literals_(std::less<uint64_t>(),
1194 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00001195 pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1196 boot_image_string_patches_(StringReferenceValueComparator(),
1197 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1198 pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01001199 boot_image_type_patches_(TypeReferenceValueComparator(),
1200 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1201 pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Vladimir Marko1998cd02017-01-13 13:02:58 +00001202 type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
Nicolas Geoffray132d8362016-11-16 09:19:42 +00001203 jit_string_patches_(StringReferenceValueComparator(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00001204 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
1205 jit_class_patches_(TypeReferenceValueComparator(),
1206 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001207 // Save the link register (containing the return address) to mimic Quick.
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001208 AddAllocatedRegister(LocationFrom(lr));
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001209}
Alexandre Rames5319def2014-10-23 10:03:10 +01001210
Alexandre Rames67555f72014-11-18 10:55:16 +00001211#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +01001212
Zheng Xu3927c8b2015-11-18 17:46:25 +08001213void CodeGeneratorARM64::EmitJumpTables() {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01001214 for (auto&& jump_table : jump_tables_) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001215 jump_table->EmitTable(this);
1216 }
1217}
1218
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001219void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
Zheng Xu3927c8b2015-11-18 17:46:25 +08001220 EmitJumpTables();
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001221 // Ensure we emit the literal pool.
1222 __ FinalizeCode();
Vladimir Marko58155012015-08-19 12:49:41 +00001223
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +00001224 CodeGenerator::Finalize(allocator);
1225}
1226
Zheng Xuad4450e2015-04-17 18:48:56 +08001227void ParallelMoveResolverARM64::PrepareForEmitNativeCode() {
1228 // Note: There are 6 kinds of moves:
1229 // 1. constant -> GPR/FPR (non-cycle)
1230 // 2. constant -> stack (non-cycle)
1231 // 3. GPR/FPR -> GPR/FPR
1232 // 4. GPR/FPR -> stack
1233 // 5. stack -> GPR/FPR
1234 // 6. stack -> stack (non-cycle)
1235 // Cases 1, 2, and 6 should never be included in a dependency cycle on ARM64. For cases 3, 4,
1236 // and 5, VIXL uses at most 1 GPR. VIXL has 2 GPR temps and 1 FPR temp, and there should be no
1237 // intersecting cycles on ARM64, so we always have 1 GPR temp and 1 FPR temp available to
1238 // resolve the dependency.
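  // For example, a GPR<->stack-slot swap (a cycle built from cases 4 and 5)
  // resolves with the single GPR temp: load the slot into the temp, store the
  // register to the slot, then move the temp into the register.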
1239 vixl_temps_.Open(GetVIXLAssembler());
1240}
1241
1242void ParallelMoveResolverARM64::FinishEmitNativeCode() {
1243 vixl_temps_.Close();
1244}
1245
1246Location ParallelMoveResolverARM64::AllocateScratchLocationFor(Location::Kind kind) {
1247 DCHECK(kind == Location::kRegister || kind == Location::kFpuRegister ||
1248 kind == Location::kStackSlot || kind == Location::kDoubleStackSlot);
1249 kind = (kind == Location::kFpuRegister) ? Location::kFpuRegister : Location::kRegister;
1250 Location scratch = GetScratchLocation(kind);
1251 if (!scratch.Equals(Location::NoLocation())) {
1252 return scratch;
1253 }
1254 // Allocate from VIXL temp registers.
1255 if (kind == Location::kRegister) {
1256 scratch = LocationFrom(vixl_temps_.AcquireX());
1257 } else {
1258 DCHECK(kind == Location::kFpuRegister);
1259 scratch = LocationFrom(vixl_temps_.AcquireD());
1260 }
1261 AddScratchLocation(scratch);
1262 return scratch;
1263}
1264
1265void ParallelMoveResolverARM64::FreeScratchLocation(Location loc) {
1266 if (loc.IsRegister()) {
1267 vixl_temps_.Release(XRegisterFrom(loc));
1268 } else {
1269 DCHECK(loc.IsFpuRegister());
1270 vixl_temps_.Release(DRegisterFrom(loc));
1271 }
1272 RemoveScratchLocation(loc);
1273}
1274
Alexandre Rames3e69f162014-12-10 10:36:50 +00001275void ParallelMoveResolverARM64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001276 MoveOperands* move = moves_[index];
Calin Juravlee460d1d2015-09-29 04:52:17 +01001277 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001278}
1279
Alexandre Rames5319def2014-10-23 10:03:10 +01001280void CodeGeneratorARM64::GenerateFrameEntry() {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001281 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001282 __ Bind(&frame_entry_label_);
1283
Serban Constantinescu02164b32014-11-13 14:05:07 +00001284 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
1285 if (do_overflow_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001286 UseScratchRegisterScope temps(masm);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001287 Register temp = temps.AcquireX();
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001288 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001289 __ Sub(temp, sp, static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
Artem Serov914d7a82017-02-07 14:33:49 +00001290 {
1291 // Ensure that between load and RecordPcInfo there are no pools emitted.
1292 ExactAssemblyScope eas(GetVIXLAssembler(),
1293 kInstructionSize,
1294 CodeBufferCheckScope::kExactSize);
1295 __ ldr(wzr, MemOperand(temp, 0));
1296 RecordPcInfo(nullptr, 0);
1297 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00001298 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001299
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001300 if (!HasEmptyFrame()) {
1301 int frame_size = GetFrameSize();
1302 // Stack layout:
1303 // sp[frame_size - 8] : lr.
1304 // ... : other preserved core registers.
1305 // ... : other preserved fp registers.
1306 // ... : reserved frame space.
1307 // sp[0] : current method.
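    // Note: the spill offsets below are computed down from frame_size, which
    // ART keeps 16-byte aligned, as the AArch64 ABI requires of SP.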
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001308
1309 // Save the current method if we need it. Note that we do not
1310 // do this in HCurrentMethod, as the instruction might have been removed
1311 // in the SSA graph.
1312 if (RequiresCurrentMethod()) {
1313 __ Str(kArtMethodRegister, MemOperand(sp, -frame_size, PreIndex));
Nicolas Geoffray9989b162016-10-13 13:42:30 +01001314 } else {
1315 __ Claim(frame_size);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001316 }
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001317 GetAssembler()->cfi().AdjustCFAOffset(frame_size);
Zheng Xu69a50302015-04-14 20:04:41 +08001318 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(),
1319 frame_size - GetCoreSpillSize());
1320 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(),
1321 frame_size - FrameEntrySpillSize());
Mingyao Yang063fc772016-08-02 11:02:54 -07001322
1323 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1324 // Initialize should_deoptimize flag to 0.
1325 Register wzr = Register(VIXLRegCodeFromART(WZR), kWRegSize);
1326 __ Str(wzr, MemOperand(sp, GetStackOffsetOfShouldDeoptimizeFlag()));
1327 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001328 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001329}
1330
1331void CodeGeneratorARM64::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001332 GetAssembler()->cfi().RememberState();
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001333 if (!HasEmptyFrame()) {
1334 int frame_size = GetFrameSize();
Zheng Xu69a50302015-04-14 20:04:41 +08001335 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(),
1336 frame_size - FrameEntrySpillSize());
1337 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(),
1338 frame_size - GetCoreSpillSize());
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001339 __ Drop(frame_size);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001340 GetAssembler()->cfi().AdjustCFAOffset(-frame_size);
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001341 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001342 __ Ret();
1343 GetAssembler()->cfi().RestoreState();
1344 GetAssembler()->cfi().DefCFAOffset(GetFrameSize());
Alexandre Rames5319def2014-10-23 10:03:10 +01001345}
1346
Scott Wakeling97c72b72016-06-24 16:19:36 +01001347CPURegList CodeGeneratorARM64::GetFramePreservedCoreRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001348 DCHECK(ArtVixlRegCodeCoherentForRegSet(core_spill_mask_, GetNumberOfCoreRegisters(), 0, 0));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001349 return CPURegList(CPURegister::kRegister, kXRegSize,
1350 core_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001351}
1352
Scott Wakeling97c72b72016-06-24 16:19:36 +01001353CPURegList CodeGeneratorARM64::GetFramePreservedFPRegisters() const {
Zheng Xuda403092015-04-24 17:35:39 +08001354 DCHECK(ArtVixlRegCodeCoherentForRegSet(0, 0, fpu_spill_mask_,
1355 GetNumberOfFloatingPointRegisters()));
Scott Wakeling97c72b72016-06-24 16:19:36 +01001356 return CPURegList(CPURegister::kFPRegister, kDRegSize,
1357 fpu_spill_mask_);
Zheng Xuda403092015-04-24 17:35:39 +08001358}
1359
Alexandre Rames5319def2014-10-23 10:03:10 +01001360void CodeGeneratorARM64::Bind(HBasicBlock* block) {
1361 __ Bind(GetLabelOf(block));
1362}
1363
Calin Juravle175dc732015-08-25 15:42:32 +01001364void CodeGeneratorARM64::MoveConstant(Location location, int32_t value) {
1365 DCHECK(location.IsRegister());
1366 __ Mov(RegisterFrom(location, Primitive::kPrimInt), value);
1367}
1368
Calin Juravlee460d1d2015-09-29 04:52:17 +01001369void CodeGeneratorARM64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1370 if (location.IsRegister()) {
1371 locations->AddTemp(location);
1372 } else {
1373 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1374 }
1375}
1376
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001377void CodeGeneratorARM64::MarkGCCard(Register object, Register value, bool value_can_be_null) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001378 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +01001379 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001380 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001381 vixl::aarch64::Label done;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001382 if (value_can_be_null) {
1383 __ Cbz(value, &done);
1384 }
Andreas Gampe542451c2016-07-26 09:02:02 -07001385 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64PointerSize>().Int32Value()));
Alexandre Rames5319def2014-10-23 10:03:10 +01001386 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001387 __ Strb(card, MemOperand(card, temp.X()));
Nicolas Geoffray07276db2015-05-18 14:22:09 +01001388 if (value_can_be_null) {
1389 __ Bind(&done);
1390 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001391}
1392
David Brazdil58282f42016-01-14 12:45:10 +00001393void CodeGeneratorARM64::SetupBlockedRegisters() const {
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001394 // Blocked core registers:
1395 // lr : Runtime reserved.
1396 // tr : Runtime reserved.
1397 // xSuspend : Runtime reserved. TODO: Unblock this when the runtime stops using it.
1398 // ip1 : VIXL core temp.
1399 // ip0 : VIXL core temp.
1400 //
1401 // Blocked fp registers:
1402 // d31 : VIXL fp temp.
Alexandre Rames5319def2014-10-23 10:03:10 +01001403 CPURegList reserved_core_registers = vixl_reserved_core_registers;
1404 reserved_core_registers.Combine(runtime_reserved_core_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +01001405 while (!reserved_core_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001406 blocked_core_registers_[reserved_core_registers.PopLowestIndex().GetCode()] = true;
Alexandre Rames5319def2014-10-23 10:03:10 +01001407 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001408
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001409 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
Zheng Xua3ec3942015-02-15 18:39:46 +08001410 while (!reserved_fp_registers.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001411 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().GetCode()] = true;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001412 }
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001413
David Brazdil58282f42016-01-14 12:45:10 +00001414 if (GetGraph()->IsDebuggable()) {
Nicolas Geoffrayecf680d2015-10-05 11:15:37 +01001415 // Stubs do not save callee-save floating point registers. If the graph
1416 // is debuggable, we need to deal with these registers differently. For
1417 // now, just block them.
David Brazdil58282f42016-01-14 12:45:10 +00001418 CPURegList reserved_fp_registers_debuggable = callee_saved_fp_registers;
1419 while (!reserved_fp_registers_debuggable.IsEmpty()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01001420 blocked_fpu_registers_[reserved_fp_registers_debuggable.PopLowestIndex().GetCode()] = true;
Serban Constantinescu3d087de2015-01-28 11:57:05 +00001421 }
1422 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001423}
1424
Alexandre Rames3e69f162014-12-10 10:36:50 +00001425size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1426 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1427 __ Str(reg, MemOperand(sp, stack_index));
1428 return kArm64WordSize;
1429}
1430
1431size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1432 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
1433 __ Ldr(reg, MemOperand(sp, stack_index));
1434 return kArm64WordSize;
1435}
1436
1437size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1438 FPRegister reg = FPRegister(reg_id, kDRegSize);
1439 __ Str(reg, MemOperand(sp, stack_index));
1440 return kArm64WordSize;
1441}
1442
1443size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1444 FPRegister reg = FPRegister(reg_id, kDRegSize);
1445 __ Ldr(reg, MemOperand(sp, stack_index));
1446 return kArm64WordSize;
1447}
1448
Alexandre Rames5319def2014-10-23 10:03:10 +01001449void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001450 stream << XRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001451}
1452
1453void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdilc74652862015-05-13 17:50:09 +01001454 stream << DRegister(reg);
Alexandre Rames5319def2014-10-23 10:03:10 +01001455}
1456
Alexandre Rames67555f72014-11-18 10:55:16 +00001457void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001458 if (constant->IsIntConstant()) {
1459 __ Mov(Register(destination), constant->AsIntConstant()->GetValue());
1460 } else if (constant->IsLongConstant()) {
1461 __ Mov(Register(destination), constant->AsLongConstant()->GetValue());
1462 } else if (constant->IsNullConstant()) {
1463 __ Mov(Register(destination), 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001464 } else if (constant->IsFloatConstant()) {
1465 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
1466 } else {
1467 DCHECK(constant->IsDoubleConstant());
1468 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
1469 }
1470}
1471
Alexandre Rames3e69f162014-12-10 10:36:50 +00001472
1473static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
1474 DCHECK(constant.IsConstant());
1475 HConstant* cst = constant.GetConstant();
1476 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001477 // Null is mapped to a core W register, which we associate with kPrimInt.
1478 (cst->IsNullConstant() && type == Primitive::kPrimInt) ||
Alexandre Rames3e69f162014-12-10 10:36:50 +00001479 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
1480 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
1481 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
1482}
1483
Roland Levillain558dea12017-01-27 19:40:44 +00001484// Allocate a scratch register from the VIXL pool, querying the
1485// floating-point register pool first, and then the core register
1486// pool. This is essentially a reimplementation of
1487// vixl::aarch64::UseScratchRegisterScope::AcquireCPURegisterOfSize
1488// using a different allocation strategy.
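// It is used below by CodeGeneratorARM64::MoveLocation to obtain a temporary
// for stack-to-stack moves.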
1489static CPURegister AcquireFPOrCoreCPURegisterOfSize(vixl::aarch64::MacroAssembler* masm,
1490 vixl::aarch64::UseScratchRegisterScope* temps,
1491 int size_in_bits) {
1492 return masm->GetScratchFPRegisterList()->IsEmpty()
1493 ? CPURegister(temps->AcquireRegisterOfSize(size_in_bits))
1494 : CPURegister(temps->AcquireVRegisterOfSize(size_in_bits));
1495}
1496
Calin Juravlee460d1d2015-09-29 04:52:17 +01001497void CodeGeneratorARM64::MoveLocation(Location destination,
1498 Location source,
1499 Primitive::Type dst_type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001500 if (source.Equals(destination)) {
1501 return;
1502 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001503
1504 // A valid move can always be inferred from the destination and source
1505 // locations. When moving from and to a register, the argument type can be
1506 // used to generate 32bit instead of 64bit moves. In debug mode we also
1507 // check the coherency of the locations and the type.
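  // For example, a null constant moved to a core register is handled as a
  // 32-bit (kPrimInt) move, while a register-to-register move of unspecified
  // type conservatively becomes a 64-bit move.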
Calin Juravlee460d1d2015-09-29 04:52:17 +01001508 bool unspecified_type = (dst_type == Primitive::kPrimVoid);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001509
1510 if (destination.IsRegister() || destination.IsFpuRegister()) {
1511 if (unspecified_type) {
1512 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1513 if (source.IsStackSlot() ||
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001514 (src_cst != nullptr && (src_cst->IsIntConstant()
1515 || src_cst->IsFloatConstant()
1516 || src_cst->IsNullConstant()))) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001517 // For stack slots and 32bit constants, a 32bit type is appropriate.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001518 dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +00001519 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001520 // If the source is a double stack slot or a 64bit constant, a 64bit
1521 // type is appropriate. Else the source is a register, and since the
1522 // type has not been specified, we choose a 64bit type to force a 64bit
1523 // move.
Calin Juravlee460d1d2015-09-29 04:52:17 +01001524 dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +00001525 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001526 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001527 DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
1528 (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
1529 CPURegister dst = CPURegisterFrom(destination, dst_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001530 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1531 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
1532 __ Ldr(dst, StackOperandFrom(source));
1533 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001534 DCHECK(CoherentConstantAndType(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001535 MoveConstant(dst, source.GetConstant());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001536 } else if (source.IsRegister()) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001537 if (destination.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001538 __ Mov(Register(dst), RegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001539 } else {
Zheng Xuad4450e2015-04-17 18:48:56 +08001540 DCHECK(destination.IsFpuRegister());
Calin Juravlee460d1d2015-09-29 04:52:17 +01001541 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1542 ? Primitive::kPrimLong
1543 : Primitive::kPrimInt;
1544 __ Fmov(FPRegisterFrom(destination, dst_type), RegisterFrom(source, source_type));
1545 }
1546 } else {
1547 DCHECK(source.IsFpuRegister());
1548 if (destination.IsRegister()) {
1549 Primitive::Type source_type = Primitive::Is64BitType(dst_type)
1550 ? Primitive::kPrimDouble
1551 : Primitive::kPrimFloat;
1552 __ Fmov(RegisterFrom(destination, dst_type), FPRegisterFrom(source, source_type));
1553 } else {
1554 DCHECK(destination.IsFpuRegister());
1555 __ Fmov(FPRegister(dst), FPRegisterFrom(source, dst_type));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001556 }
1557 }
Alexandre Rames3e69f162014-12-10 10:36:50 +00001558 } else { // The destination is not a register. It must be a stack slot.
1559 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1560 if (source.IsRegister() || source.IsFpuRegister()) {
1561 if (unspecified_type) {
1562 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001563 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001564 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001565 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001566 }
1567 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001568 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1569 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
1570 __ Str(CPURegisterFrom(source, dst_type), StackOperandFrom(destination));
Alexandre Rames3e69f162014-12-10 10:36:50 +00001571 } else if (source.IsConstant()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001572 DCHECK(unspecified_type || CoherentConstantAndType(source, dst_type))
1573 << source << " " << dst_type;
Alexandre Rames3e69f162014-12-10 10:36:50 +00001574 UseScratchRegisterScope temps(GetVIXLAssembler());
1575 HConstant* src_cst = source.GetConstant();
1576 CPURegister temp;
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001577 if (src_cst->IsZeroBitPattern()) {
Scott Wakeling79db9972017-01-19 14:08:42 +00001578 temp = (src_cst->IsLongConstant() || src_cst->IsDoubleConstant())
1579 ? Register(xzr)
1580 : Register(wzr);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001581 } else {
Alexandre Ramesb2b753c2016-08-02 13:45:28 +01001582 if (src_cst->IsIntConstant()) {
1583 temp = temps.AcquireW();
1584 } else if (src_cst->IsLongConstant()) {
1585 temp = temps.AcquireX();
1586 } else if (src_cst->IsFloatConstant()) {
1587 temp = temps.AcquireS();
1588 } else {
1589 DCHECK(src_cst->IsDoubleConstant());
1590 temp = temps.AcquireD();
1591 }
1592 MoveConstant(temp, src_cst);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001593 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001594 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001595 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +00001596 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001597 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +00001598 UseScratchRegisterScope temps(GetVIXLAssembler());
Roland Levillain78b3d5d2017-01-04 10:27:50 +00001599 // Use any scratch register (a core or a floating-point one)
1600 // from VIXL scratch register pools as a temporary.
1601 //
1602 // We used to only use the FP scratch register pool, but in some
1603 // rare cases the only register from this pool (D31) would
1604 // already be used (e.g. within a ParallelMove instruction, when
1605 // a move is blocked by another move requiring a scratch FP
1606 // register, which would reserve D31). To prevent this issue, we
1607 // ask for a scratch register of any type (core or FP).
Roland Levillain558dea12017-01-27 19:40:44 +00001608 //
1609 // Also, we ask for an FP scratch register first, as the
1610 // demand for scratch core registers is higher. This is why we
1611 // use AcquireFPOrCoreCPURegisterOfSize instead of
1612 // UseScratchRegisterScope::AcquireCPURegisterOfSize, which
1613 // allocates core scratch registers first.
1614 CPURegister temp = AcquireFPOrCoreCPURegisterOfSize(
1615 GetVIXLAssembler(),
1616 &temps,
1617 (destination.IsDoubleStackSlot() ? kXRegSize : kWRegSize));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001618 __ Ldr(temp, StackOperandFrom(source));
1619 __ Str(temp, StackOperandFrom(destination));
1620 }
1621 }
1622}
1623
1624void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001625 CPURegister dst,
1626 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001627 switch (type) {
1628 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +00001629 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001630 break;
1631 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +00001632 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001633 break;
1634 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +00001635 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001636 break;
1637 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +00001638 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001639 break;
1640 case Primitive::kPrimInt:
1641 case Primitive::kPrimNot:
1642 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001643 case Primitive::kPrimFloat:
1644 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001645 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +00001646 __ Ldr(dst, src);
1647 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001648 case Primitive::kPrimVoid:
1649 LOG(FATAL) << "Unreachable type " << type;
1650 }
1651}
1652
Calin Juravle77520bc2015-01-12 18:45:46 +00001653void CodeGeneratorARM64::LoadAcquire(HInstruction* instruction,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001654 CPURegister dst,
Roland Levillain44015862016-01-22 11:47:17 +00001655 const MemOperand& src,
1656 bool needs_null_check) {
Alexandre Ramesd921d642015-04-16 15:07:16 +01001657 MacroAssembler* masm = GetVIXLAssembler();
Alexandre Ramesd921d642015-04-16 15:07:16 +01001658 UseScratchRegisterScope temps(masm);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001659 Register temp_base = temps.AcquireX();
Calin Juravle77520bc2015-01-12 18:45:46 +00001660 Primitive::Type type = instruction->GetType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001661
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001662 DCHECK(!src.IsPreIndex());
1663 DCHECK(!src.IsPostIndex());
1664
1665 // TODO(vixl): Let the MacroAssembler handle MemOperand.
Scott Wakeling97c72b72016-06-24 16:19:36 +01001666 __ Add(temp_base, src.GetBaseRegister(), OperandFromMemOperand(src));
Artem Serov914d7a82017-02-07 14:33:49 +00001667 {
1668 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
1669 MemOperand base = MemOperand(temp_base);
1670 switch (type) {
1671 case Primitive::kPrimBoolean:
1672 {
1673 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1674 __ ldarb(Register(dst), base);
1675 if (needs_null_check) {
1676 MaybeRecordImplicitNullCheck(instruction);
1677 }
1678 }
1679 break;
1680 case Primitive::kPrimByte:
1681 {
1682 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1683 __ ldarb(Register(dst), base);
1684 if (needs_null_check) {
1685 MaybeRecordImplicitNullCheck(instruction);
1686 }
1687 }
1688 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1689 break;
1690 case Primitive::kPrimChar:
1691 {
1692 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1693 __ ldarh(Register(dst), base);
1694 if (needs_null_check) {
1695 MaybeRecordImplicitNullCheck(instruction);
1696 }
1697 }
1698 break;
1699 case Primitive::kPrimShort:
1700 {
1701 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1702 __ ldarh(Register(dst), base);
1703 if (needs_null_check) {
1704 MaybeRecordImplicitNullCheck(instruction);
1705 }
1706 }
1707 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1708 break;
1709 case Primitive::kPrimInt:
1710 case Primitive::kPrimNot:
1711 case Primitive::kPrimLong:
1712 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
1713 {
1714 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1715 __ ldar(Register(dst), base);
1716 if (needs_null_check) {
1717 MaybeRecordImplicitNullCheck(instruction);
1718 }
1719 }
1720 break;
1721 case Primitive::kPrimFloat:
1722 case Primitive::kPrimDouble: {
1723 DCHECK(dst.IsFPRegister());
1724 DCHECK_EQ(dst.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001725
Artem Serov914d7a82017-02-07 14:33:49 +00001726 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1727 {
1728 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1729 __ ldar(temp, base);
1730 if (needs_null_check) {
1731 MaybeRecordImplicitNullCheck(instruction);
1732 }
1733 }
1734 __ Fmov(FPRegister(dst), temp);
1735 break;
Roland Levillain44015862016-01-22 11:47:17 +00001736 }
Artem Serov914d7a82017-02-07 14:33:49 +00001737 case Primitive::kPrimVoid:
1738 LOG(FATAL) << "Unreachable type " << type;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001739 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001740 }
1741}
1742
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001743void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001744 CPURegister src,
1745 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001746 switch (type) {
1747 case Primitive::kPrimBoolean:
1748 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001749 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001750 break;
1751 case Primitive::kPrimChar:
1752 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001753 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001754 break;
1755 case Primitive::kPrimInt:
1756 case Primitive::kPrimNot:
1757 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001758 case Primitive::kPrimFloat:
1759 case Primitive::kPrimDouble:
Alexandre Rames542361f2015-01-29 16:57:31 +00001760 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001761 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001762 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001763 case Primitive::kPrimVoid:
1764 LOG(FATAL) << "Unreachable type " << type;
1765 }
1766}
1767
Artem Serov914d7a82017-02-07 14:33:49 +00001768void CodeGeneratorARM64::StoreRelease(HInstruction* instruction,
1769 Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001770 CPURegister src,
Artem Serov914d7a82017-02-07 14:33:49 +00001771 const MemOperand& dst,
1772 bool needs_null_check) {
1773 MacroAssembler* masm = GetVIXLAssembler();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001774 UseScratchRegisterScope temps(GetVIXLAssembler());
1775 Register temp_base = temps.AcquireX();
1776
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001777 DCHECK(!dst.IsPreIndex());
1778 DCHECK(!dst.IsPostIndex());
1779
1780 // TODO(vixl): Let the MacroAssembler handle this.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001781 Operand op = OperandFromMemOperand(dst);
Scott Wakeling97c72b72016-06-24 16:19:36 +01001782 __ Add(temp_base, dst.GetBaseRegister(), op);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001783 MemOperand base = MemOperand(temp_base);
Artem Serov914d7a82017-02-07 14:33:49 +00001784 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001785 switch (type) {
1786 case Primitive::kPrimBoolean:
1787 case Primitive::kPrimByte:
Artem Serov914d7a82017-02-07 14:33:49 +00001788 {
1789 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1790 __ stlrb(Register(src), base);
1791 if (needs_null_check) {
1792 MaybeRecordImplicitNullCheck(instruction);
1793 }
1794 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001795 break;
1796 case Primitive::kPrimChar:
1797 case Primitive::kPrimShort:
Artem Serov914d7a82017-02-07 14:33:49 +00001798 {
1799 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1800 __ stlrh(Register(src), base);
1801 if (needs_null_check) {
1802 MaybeRecordImplicitNullCheck(instruction);
1803 }
1804 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001805 break;
1806 case Primitive::kPrimInt:
1807 case Primitive::kPrimNot:
1808 case Primitive::kPrimLong:
Alexandre Rames542361f2015-01-29 16:57:31 +00001809 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Artem Serov914d7a82017-02-07 14:33:49 +00001810 {
1811 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1812 __ stlr(Register(src), base);
1813 if (needs_null_check) {
1814 MaybeRecordImplicitNullCheck(instruction);
1815 }
1816 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001817 break;
1818 case Primitive::kPrimFloat:
1819 case Primitive::kPrimDouble: {
Alexandre Rames542361f2015-01-29 16:57:31 +00001820 DCHECK_EQ(src.Is64Bits(), Primitive::Is64BitType(type));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01001821 Register temp_src;
1822 if (src.IsZero()) {
1823 // The zero register is used to avoid synthesizing zero constants.
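        // (For example, storing a floating-point +0.0 goes through wzr/xzr
        // directly here, skipping the Fmov in the other branch.)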
1824 temp_src = Register(src);
1825 } else {
1826 DCHECK(src.IsFPRegister());
1827 temp_src = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1828 __ Fmov(temp_src, FPRegister(src));
1829 }
Artem Serov914d7a82017-02-07 14:33:49 +00001830 {
1831 ExactAssemblyScope eas(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
1832 __ stlr(temp_src, base);
1833 if (needs_null_check) {
1834 MaybeRecordImplicitNullCheck(instruction);
1835 }
1836 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001837 break;
1838 }
1839 case Primitive::kPrimVoid:
1840 LOG(FATAL) << "Unreachable type " << type;
1841 }
1842}
1843
Calin Juravle175dc732015-08-25 15:42:32 +01001844void CodeGeneratorARM64::InvokeRuntime(QuickEntrypointEnum entrypoint,
1845 HInstruction* instruction,
1846 uint32_t dex_pc,
1847 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001848 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Artem Serov914d7a82017-02-07 14:33:49 +00001849
1850 __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entrypoint).Int32Value()));
1851 {
1852 // Ensure the pc position is recorded immediately after the `blr` instruction.
1853 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
1854 __ blr(lr);
1855 if (EntrypointRequiresStackMap(entrypoint)) {
1856 RecordPcInfo(instruction, dex_pc, slow_path);
1857 }
Serban Constantinescuda8ffec2016-03-09 12:02:11 +00001858 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001859}
1860
Roland Levillaindec8f632016-07-22 17:10:06 +01001861void CodeGeneratorARM64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1862 HInstruction* instruction,
1863 SlowPathCode* slow_path) {
1864 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
Roland Levillaindec8f632016-07-22 17:10:06 +01001865 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1866 __ Blr(lr);
1867}
1868
Alexandre Rames67555f72014-11-18 10:55:16 +00001869void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
Scott Wakeling97c72b72016-06-24 16:19:36 +01001870 Register class_reg) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001871 UseScratchRegisterScope temps(GetVIXLAssembler());
1872 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001873 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1874
Serban Constantinescu02164b32014-11-13 14:05:07 +00001875 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001876 // TODO(vixl): Let the MacroAssembler handle MemOperand.
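  // The Ldar below performs a load-acquire of the class status, ordering any
  // subsequent accesses to the class's fields after the initialization check.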
1877 __ Add(temp, class_reg, status_offset);
1878 __ Ldar(temp, HeapOperand(temp));
1879 __ Cmp(temp, mirror::Class::kStatusInitialized);
1880 __ B(lt, slow_path->GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00001881 __ Bind(slow_path->GetExitLabel());
1882}
Alexandre Rames5319def2014-10-23 10:03:10 +01001883
Roland Levillain44015862016-01-22 11:47:17 +00001884void CodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001885 BarrierType type = BarrierAll;
1886
1887 switch (kind) {
1888 case MemBarrierKind::kAnyAny:
1889 case MemBarrierKind::kAnyStore: {
1890 type = BarrierAll;
1891 break;
1892 }
1893 case MemBarrierKind::kLoadAny: {
1894 type = BarrierReads;
1895 break;
1896 }
1897 case MemBarrierKind::kStoreStore: {
1898 type = BarrierWrites;
1899 break;
1900 }
1901 default:
1902 LOG(FATAL) << "Unexpected memory barrier " << kind;
1903 }
1904 __ Dmb(InnerShareable, type);
1905}
1906
Serban Constantinescu02164b32014-11-13 14:05:07 +00001907void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1908 HBasicBlock* successor) {
1909 SuspendCheckSlowPathARM64* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01001910 down_cast<SuspendCheckSlowPathARM64*>(instruction->GetSlowPath());
1911 if (slow_path == nullptr) {
1912 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1913 instruction->SetSlowPath(slow_path);
1914 codegen_->AddSlowPath(slow_path);
1915 if (successor != nullptr) {
1916 DCHECK(successor->IsLoopHeader());
1917 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
1918 }
1919 } else {
1920 DCHECK_EQ(slow_path->GetSuccessor(), successor);
1921 }
1922
Serban Constantinescu02164b32014-11-13 14:05:07 +00001923 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1924 Register temp = temps.AcquireW();
1925
Andreas Gampe542451c2016-07-26 09:02:02 -07001926 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64PointerSize>().SizeValue()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001927 if (successor == nullptr) {
1928 __ Cbnz(temp, slow_path->GetEntryLabel());
1929 __ Bind(slow_path->GetReturnLabel());
1930 } else {
1931 __ Cbz(temp, codegen_->GetLabelOf(successor));
1932 __ B(slow_path->GetEntryLabel());
1933 // slow_path will return to GetLabelOf(successor).
1934 }
1935}
1936
Alexandre Rames5319def2014-10-23 10:03:10 +01001937InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1938 CodeGeneratorARM64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001939 : InstructionCodeGenerator(graph, codegen),
Alexandre Rames5319def2014-10-23 10:03:10 +01001940 assembler_(codegen->GetAssembler()),
1941 codegen_(codegen) {}
1942
1943#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001944 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001945
1946#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1947
1948enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001949 // Using a base helps identify when we hit such breakpoints.
1950 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001951#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1952 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1953#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1954};
1955
1956#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001957 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr ATTRIBUTE_UNUSED) { \
Alexandre Rames5319def2014-10-23 10:03:10 +01001958 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1959 } \
1960 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1961 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1962 locations->SetOut(Location::Any()); \
1963 }
1964 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1965#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1966
1967#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001968#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001969
Alexandre Rames67555f72014-11-18 10:55:16 +00001970void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001971 DCHECK_EQ(instr->InputCount(), 2U);
1972 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1973 Primitive::Type type = instr->GetResultType();
1974 switch (type) {
1975 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001976 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001977 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00001978 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001979 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001980 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001981
1982 case Primitive::kPrimFloat:
1983 case Primitive::kPrimDouble:
1984 locations->SetInAt(0, Location::RequiresFpuRegister());
1985 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001986 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001987 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001988
Alexandre Rames5319def2014-10-23 10:03:10 +01001989 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001990 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001991 }
1992}
1993
Alexandre Rames09a99962015-04-15 11:47:56 +01001994void LocationsBuilderARM64::HandleFieldGet(HInstruction* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00001995 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
1996
1997 bool object_field_get_with_read_barrier =
1998 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Rames09a99962015-04-15 11:47:56 +01001999 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002000 new (GetGraph()->GetArena()) LocationSummary(instruction,
2001 object_field_get_with_read_barrier ?
2002 LocationSummary::kCallOnSlowPath :
2003 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002004 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002005 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Roland Levillaind0b51832017-01-26 19:04:23 +00002006 // We need a temporary register for the read barrier marking slow
2007 // path in CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier.
2008 locations->AddTemp(Location::RequiresRegister());
Vladimir Marko70e97462016-08-09 11:04:26 +01002009 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002010 locations->SetInAt(0, Location::RequiresRegister());
2011 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2012 locations->SetOut(Location::RequiresFpuRegister());
2013 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002014 // The output overlaps for an object field get when read barriers
2015 // are enabled: we do not want the load to overwrite the object's
2016 // location, as we need it to emit the read barrier.
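    // (The slow path emitted in HandleFieldGet below takes the object's
    // original location, `base_loc`, as an input.)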
2017 locations->SetOut(
2018 Location::RequiresRegister(),
2019 object_field_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames09a99962015-04-15 11:47:56 +01002020 }
2021}
2022
2023void InstructionCodeGeneratorARM64::HandleFieldGet(HInstruction* instruction,
2024 const FieldInfo& field_info) {
2025 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain44015862016-01-22 11:47:17 +00002026 LocationSummary* locations = instruction->GetLocations();
2027 Location base_loc = locations->InAt(0);
2028 Location out = locations->Out();
2029 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01002030 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002031 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), field_info.GetFieldOffset());
Alexandre Rames09a99962015-04-15 11:47:56 +01002032
Roland Levillain44015862016-01-22 11:47:17 +00002033 if (field_type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2034 // Object FieldGet with Baker's read barrier case.
Roland Levillain44015862016-01-22 11:47:17 +00002035 // /* HeapReference<Object> */ out = *(base + offset)
2036 Register base = RegisterFrom(base_loc, Primitive::kPrimNot);
Roland Levillaind0b51832017-01-26 19:04:23 +00002037 Register temp = WRegisterFrom(locations->GetTemp(0));
Roland Levillain44015862016-01-22 11:47:17 +00002038 // Note that potential implicit null checks are handled in this
2039 // CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier call.
2040 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2041 instruction,
2042 out,
2043 base,
2044 offset,
2045 temp,
2046 /* needs_null_check */ true,
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002047 field_info.IsVolatile());
Roland Levillain44015862016-01-22 11:47:17 +00002048 } else {
2049 // General case.
2050 if (field_info.IsVolatile()) {
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00002051 // Note that a potential implicit null check is handled in this
2052 // CodeGeneratorARM64::LoadAcquire call.
2053 // NB: LoadAcquire will record the pc info if needed.
2054 codegen_->LoadAcquire(
2055 instruction, OutputCPURegister(instruction), field, /* needs_null_check */ true);
Alexandre Rames09a99962015-04-15 11:47:56 +01002056 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002057 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2058 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002059 codegen_->Load(field_type, OutputCPURegister(instruction), field);
Alexandre Rames09a99962015-04-15 11:47:56 +01002060 codegen_->MaybeRecordImplicitNullCheck(instruction);
Alexandre Rames09a99962015-04-15 11:47:56 +01002061 }
Roland Levillain44015862016-01-22 11:47:17 +00002062 if (field_type == Primitive::kPrimNot) {
2063 // If read barriers are enabled, emit read barriers other than
2064 // Baker's using a slow path (and also unpoison the loaded
2065 // reference, if heap poisoning is enabled).
2066 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
2067 }
Roland Levillain4d027112015-07-01 15:41:14 +01002068 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002069}
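// Illustrative sketch (an assumption for exposition, not part of the source):
// with read barriers off, a non-volatile int field get at offset 16 is a plain
// load, while a volatile one goes through LoadAcquire, which materializes the
// address because ldar only accepts a bare base register:
//   ldr  w0, [x1, #16]   // non-volatile: plain load
//   add  x16, x1, #16    // volatile: compute the field address ...
//   ldar w0, [x16]       // ... then load-acquire for the ordering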
2070
2071void LocationsBuilderARM64::HandleFieldSet(HInstruction* instruction) {
2072 LocationSummary* locations =
2073 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2074 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002075 if (IsConstantZeroBitPattern(instruction->InputAt(1))) {
2076 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
2077 } else if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002078 locations->SetInAt(1, Location::RequiresFpuRegister());
2079 } else {
2080 locations->SetInAt(1, Location::RequiresRegister());
2081 }
2082}
2083
2084void InstructionCodeGeneratorARM64::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002085 const FieldInfo& field_info,
2086 bool value_can_be_null) {
Alexandre Rames09a99962015-04-15 11:47:56 +01002087 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
2088
2089 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002090 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 1);
Roland Levillain4d027112015-07-01 15:41:14 +01002091 CPURegister source = value;
Alexandre Rames09a99962015-04-15 11:47:56 +01002092 Offset offset = field_info.GetFieldOffset();
2093 Primitive::Type field_type = field_info.GetFieldType();
Alexandre Rames09a99962015-04-15 11:47:56 +01002094
Roland Levillain4d027112015-07-01 15:41:14 +01002095 {
2096 // We use a block to end the scratch scope before the write barrier, thus
2097 // freeing the temporary registers so they can be used in `MarkGCCard`.
2098 UseScratchRegisterScope temps(GetVIXLAssembler());
2099
2100 if (kPoisonHeapReferences && field_type == Primitive::kPrimNot) {
2101 DCHECK(value.IsW());
2102 Register temp = temps.AcquireW();
2103 __ Mov(temp, value.W());
2104 GetAssembler()->PoisonHeapReference(temp.W());
2105 source = temp;
Alexandre Rames09a99962015-04-15 11:47:56 +01002106 }
Roland Levillain4d027112015-07-01 15:41:14 +01002107
2108 if (field_info.IsVolatile()) {
Artem Serov914d7a82017-02-07 14:33:49 +00002109 codegen_->StoreRelease(
2110 instruction, field_type, source, HeapOperand(obj, offset), /* needs_null_check */ true);
Roland Levillain4d027112015-07-01 15:41:14 +01002111 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00002112 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2113 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain4d027112015-07-01 15:41:14 +01002114 codegen_->Store(field_type, source, HeapOperand(obj, offset));
2115 codegen_->MaybeRecordImplicitNullCheck(instruction);
2116 }
Alexandre Rames09a99962015-04-15 11:47:56 +01002117 }
2118
2119 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002120 codegen_->MarkGCCard(obj, Register(value), value_can_be_null);
Alexandre Rames09a99962015-04-15 11:47:56 +01002121 }
2122}
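// Illustrative sketch (assumed example, not from the source): a non-volatile
// reference field set with heap poisoning off is a plain store followed by the
// card-table write barrier emitted by MarkGCCard:
//   str  w2, [x1, #12]   // obj.field = value (offset illustrative)
//   ...                  // MarkGCCard dirties obj's card if value can be non-null
// A volatile set instead uses StoreRelease (stlr) to provide the ordering.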
2123
Alexandre Rames67555f72014-11-18 10:55:16 +00002124void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002125 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002126
2127 switch (type) {
2128 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002129 case Primitive::kPrimLong: {
2130 Register dst = OutputRegister(instr);
2131 Register lhs = InputRegisterAt(instr, 0);
2132 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01002133 if (instr->IsAdd()) {
2134 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002135 } else if (instr->IsAnd()) {
2136 __ And(dst, lhs, rhs);
2137 } else if (instr->IsOr()) {
2138 __ Orr(dst, lhs, rhs);
2139 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002140 __ Sub(dst, lhs, rhs);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002141 } else if (instr->IsRor()) {
2142 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002143 uint32_t shift = rhs.GetImmediate() & (lhs.GetSizeInBits() - 1);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00002144 __ Ror(dst, lhs, shift);
2145 } else {
2146 // Ensure the shift distance is in a register of the same size as the
2147 // result. If we are rotating a long and the shift originally arrives in a
2148 // W register, we do not need to sxtw it for use as an X register, since
2149 // shift distances are always taken modulo the register width (& reg_bits - 1).
2150 __ Ror(dst, lhs, RegisterFrom(instr->GetLocations()->InAt(1), type));
2151 }
Alexandre Rames67555f72014-11-18 10:55:16 +00002152 } else {
2153 DCHECK(instr->IsXor());
2154 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01002155 }
2156 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002157 }
2158 case Primitive::kPrimFloat:
2159 case Primitive::kPrimDouble: {
2160 FPRegister dst = OutputFPRegister(instr);
2161 FPRegister lhs = InputFPRegisterAt(instr, 0);
2162 FPRegister rhs = InputFPRegisterAt(instr, 1);
2163 if (instr->IsAdd()) {
2164 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002165 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002166 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00002167 } else {
2168 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002169 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002170 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002171 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002172 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00002173 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01002174 }
2175}
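// Illustrative sketch (assumption): for `int r = a | b` the integral path above
// emits a single data-processing instruction; the right-hand side may be an
// encodable immediate thanks to InputOperandAt:
//   orr w0, w1, w2      // register operand
//   orr w0, w1, #0xff   // immediate operand, when encodable as a logical imm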
2176
Serban Constantinescu02164b32014-11-13 14:05:07 +00002177void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
2178 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2179
2180 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2181 Primitive::Type type = instr->GetResultType();
2182 switch (type) {
2183 case Primitive::kPrimInt:
2184 case Primitive::kPrimLong: {
2185 locations->SetInAt(0, Location::RequiresRegister());
2186 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2187 locations->SetOut(Location::RequiresRegister());
2188 break;
2189 }
2190 default:
2191 LOG(FATAL) << "Unexpected shift type " << type;
2192 }
2193}
2194
2195void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
2196 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
2197
2198 Primitive::Type type = instr->GetType();
2199 switch (type) {
2200 case Primitive::kPrimInt:
2201 case Primitive::kPrimLong: {
2202 Register dst = OutputRegister(instr);
2203 Register lhs = InputRegisterAt(instr, 0);
2204 Operand rhs = InputOperandAt(instr, 1);
2205 if (rhs.IsImmediate()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002206 uint32_t shift_value = rhs.GetImmediate() &
Roland Levillain5b5b9312016-03-22 14:57:31 +00002207 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002208 if (instr->IsShl()) {
2209 __ Lsl(dst, lhs, shift_value);
2210 } else if (instr->IsShr()) {
2211 __ Asr(dst, lhs, shift_value);
2212 } else {
2213 __ Lsr(dst, lhs, shift_value);
2214 }
2215 } else {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002216 Register rhs_reg = dst.IsX() ? rhs.GetRegister().X() : rhs.GetRegister().W();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002217
2218 if (instr->IsShl()) {
2219 __ Lsl(dst, lhs, rhs_reg);
2220 } else if (instr->IsShr()) {
2221 __ Asr(dst, lhs, rhs_reg);
2222 } else {
2223 __ Lsr(dst, lhs, rhs_reg);
2224 }
2225 }
2226 break;
2227 }
2228 default:
2229 LOG(FATAL) << "Unexpected shift operation type " << type;
2230 }
2231}
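// Illustrative sketch (assumption): shift distances are masked with
// kMaxIntShiftDistance/kMaxLongShiftDistance above to match Java semantics, so
// `x << 33` on an int becomes a shift by 1:
//   lsl w0, w1, #1
// For register distances the A64 shift instructions mask the low bits
// themselves, so no explicit And is needed:
//   lsl w0, w1, w2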
2232
Alexandre Rames5319def2014-10-23 10:03:10 +01002233void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002234 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002235}
2236
2237void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00002238 HandleBinaryOp(instruction);
2239}
2240
2241void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
2242 HandleBinaryOp(instruction);
2243}
2244
2245void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
2246 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002247}
2248
Artem Serov7fc63502016-02-09 17:15:29 +00002249void LocationsBuilderARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002250 DCHECK(Primitive::IsIntegralType(instr->GetType())) << instr->GetType();
2251 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2252 locations->SetInAt(0, Location::RequiresRegister());
2253 // There is no immediate variant of negated bitwise instructions in AArch64.
2254 locations->SetInAt(1, Location::RequiresRegister());
2255 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2256}
2257
Artem Serov7fc63502016-02-09 17:15:29 +00002258void InstructionCodeGeneratorARM64::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instr) {
Kevin Brodsky9ff0d202016-01-11 13:43:31 +00002259 Register dst = OutputRegister(instr);
2260 Register lhs = InputRegisterAt(instr, 0);
2261 Register rhs = InputRegisterAt(instr, 1);
2262
2263 switch (instr->GetOpKind()) {
2264 case HInstruction::kAnd:
2265 __ Bic(dst, lhs, rhs);
2266 break;
2267 case HInstruction::kOr:
2268 __ Orn(dst, lhs, rhs);
2269 break;
2270 case HInstruction::kXor:
2271 __ Eon(dst, lhs, rhs);
2272 break;
2273 default:
2274 LOG(FATAL) << "Unreachable";
2275 }
2276}
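// Illustrative sketch (assumption): merging the negation lets `a & ~b` compile
// to a single instruction instead of mvn + and:
//   bic w0, w1, w2      // w0 = w1 & ~w2
// Orn and Eon play the same role for `a | ~b` and `a ^ ~b`.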
2277
Anton Kirilov74234da2017-01-13 14:42:47 +00002278void LocationsBuilderARM64::VisitDataProcWithShifterOp(
2279 HDataProcWithShifterOp* instruction) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002280 DCHECK(instruction->GetType() == Primitive::kPrimInt ||
2281 instruction->GetType() == Primitive::kPrimLong);
2282 LocationSummary* locations =
2283 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2284 if (instruction->GetInstrKind() == HInstruction::kNeg) {
2285 locations->SetInAt(0, Location::ConstantLocation(instruction->InputAt(0)->AsConstant()));
2286 } else {
2287 locations->SetInAt(0, Location::RequiresRegister());
2288 }
2289 locations->SetInAt(1, Location::RequiresRegister());
2290 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2291}
2292
Anton Kirilov74234da2017-01-13 14:42:47 +00002293void InstructionCodeGeneratorARM64::VisitDataProcWithShifterOp(
2294 HDataProcWithShifterOp* instruction) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002295 Primitive::Type type = instruction->GetType();
2296 HInstruction::InstructionKind kind = instruction->GetInstrKind();
2297 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
2298 Register out = OutputRegister(instruction);
2299 Register left;
2300 if (kind != HInstruction::kNeg) {
2301 left = InputRegisterAt(instruction, 0);
2302 }
Anton Kirilov74234da2017-01-13 14:42:47 +00002303 // If this `HDataProcWithShifterOp` was created by merging a type conversion
Alexandre Rames8626b742015-11-25 16:28:08 +00002304 // into the shifter operand operation, the IR that generates `right_reg` (the
2305 // input to the type conversion) can have a type different from this
2306 // instruction's type, so we indicate the type manually.
2307 Register right_reg = RegisterFrom(instruction->GetLocations()->InAt(1), type);
Alexandre Rames8626b742015-11-25 16:28:08 +00002308 Operand right_operand(0);
2309
Anton Kirilov74234da2017-01-13 14:42:47 +00002310 HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
2311 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
Alexandre Rames8626b742015-11-25 16:28:08 +00002312 right_operand = Operand(right_reg, helpers::ExtendFromOpKind(op_kind));
2313 } else {
Anton Kirilov74234da2017-01-13 14:42:47 +00002314 right_operand = Operand(right_reg,
2315 helpers::ShiftFromOpKind(op_kind),
2316 instruction->GetShiftAmount());
Alexandre Rames8626b742015-11-25 16:28:08 +00002317 }
2318
2319 // Logical binary operations do not support extension operations in the
2320 // operand. Note that VIXL would still cope if one were passed, by
2321 // generating the extension as a separate instruction.
2322 // `HNeg` also does not support extension. See comments in `ShifterOperandSupportsExtension()`.
2323 DCHECK(!right_operand.IsExtendedRegister() ||
2324 (kind != HInstruction::kAnd && kind != HInstruction::kOr && kind != HInstruction::kXor &&
2325 kind != HInstruction::kNeg));
2326 switch (kind) {
2327 case HInstruction::kAdd:
2328 __ Add(out, left, right_operand);
2329 break;
2330 case HInstruction::kAnd:
2331 __ And(out, left, right_operand);
2332 break;
2333 case HInstruction::kNeg:
Roland Levillain1a653882016-03-18 18:05:57 +00002334 DCHECK(instruction->InputAt(0)->AsConstant()->IsArithmeticZero());
Alexandre Rames8626b742015-11-25 16:28:08 +00002335 __ Neg(out, right_operand);
2336 break;
2337 case HInstruction::kOr:
2338 __ Orr(out, left, right_operand);
2339 break;
2340 case HInstruction::kSub:
2341 __ Sub(out, left, right_operand);
2342 break;
2343 case HInstruction::kXor:
2344 __ Eor(out, left, right_operand);
2345 break;
2346 default:
2347 LOG(FATAL) << "Unexpected operation kind: " << kind;
2348 UNREACHABLE();
2349 }
2350}
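// Illustrative sketch (assumption): folding the shift into the operand turns
// `a + (b << 5)` into one instruction instead of two:
//   add w0, w1, w2, lsl #5
// An extension merge such as `longA + (long) intB` similarly becomes:
//   add x0, x1, w2, sxtw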
2351
Artem Serov328429f2016-07-06 16:23:04 +01002352void LocationsBuilderARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002353 LocationSummary* locations =
2354 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2355 locations->SetInAt(0, Location::RequiresRegister());
2356 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->GetOffset(), instruction));
2357 locations->SetOut(Location::RequiresRegister());
2358}
2359
Roland Levillain19c54192016-11-04 13:44:09 +00002360void InstructionCodeGeneratorARM64::VisitIntermediateAddress(HIntermediateAddress* instruction) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002361 __ Add(OutputRegister(instruction),
2362 InputRegisterAt(instruction, 0),
2363 Operand(InputOperandAt(instruction, 1)));
2364}
2365
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002366void LocationsBuilderARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002367 LocationSummary* locations =
2368 new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002369 HInstruction* accumulator = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
2370 if (instr->GetOpKind() == HInstruction::kSub &&
2371 accumulator->IsConstant() &&
Roland Levillain1a653882016-03-18 18:05:57 +00002372 accumulator->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002373 // Don't allocate a register for the accumulator: the Mneg instruction needs none.
2374 } else {
2375 locations->SetInAt(HMultiplyAccumulate::kInputAccumulatorIndex,
2376 Location::RequiresRegister());
2377 }
2378 locations->SetInAt(HMultiplyAccumulate::kInputMulLeftIndex, Location::RequiresRegister());
2379 locations->SetInAt(HMultiplyAccumulate::kInputMulRightIndex, Location::RequiresRegister());
Alexandre Rames418318f2015-11-20 15:55:47 +00002380 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2381}
2382
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002383void InstructionCodeGeneratorARM64::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
Alexandre Rames418318f2015-11-20 15:55:47 +00002384 Register res = OutputRegister(instr);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002385 Register mul_left = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulLeftIndex);
2386 Register mul_right = InputRegisterAt(instr, HMultiplyAccumulate::kInputMulRightIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002387
2388 // Avoid emitting code that could trigger Cortex A53's erratum 835769.
2389 // This fixup should be carried out for all multiply-accumulate instructions:
2390 // madd, msub, smaddl, smsubl, umaddl and umsubl.
2391 if (instr->GetType() == Primitive::kPrimLong &&
2392 codegen_->GetInstructionSetFeatures().NeedFixCortexA53_835769()) {
2393 MacroAssembler* masm = down_cast<CodeGeneratorARM64*>(codegen_)->GetVIXLAssembler();
Scott Wakeling97c72b72016-06-24 16:19:36 +01002394 vixl::aarch64::Instruction* prev =
2395 masm->GetCursorAddress<vixl::aarch64::Instruction*>() - kInstructionSize;
Alexandre Rames418318f2015-11-20 15:55:47 +00002396 if (prev->IsLoadOrStore()) {
2397 // Make sure we emit exactly one nop.
Artem Serov914d7a82017-02-07 14:33:49 +00002398 ExactAssemblyScope scope(masm, kInstructionSize, CodeBufferCheckScope::kExactSize);
Alexandre Rames418318f2015-11-20 15:55:47 +00002399 __ nop();
2400 }
2401 }
2402
2403 if (instr->GetOpKind() == HInstruction::kAdd) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002404 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
Alexandre Rames418318f2015-11-20 15:55:47 +00002405 __ Madd(res, mul_left, mul_right, accumulator);
2406 } else {
2407 DCHECK(instr->GetOpKind() == HInstruction::kSub);
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002408 HInstruction* accum_instr = instr->InputAt(HMultiplyAccumulate::kInputAccumulatorIndex);
Roland Levillain1a653882016-03-18 18:05:57 +00002409 if (accum_instr->IsConstant() && accum_instr->AsConstant()->IsArithmeticZero()) {
Artem Udovichenko4a0dad62016-01-26 12:28:31 +03002410 __ Mneg(res, mul_left, mul_right);
2411 } else {
2412 Register accumulator = InputRegisterAt(instr, HMultiplyAccumulate::kInputAccumulatorIndex);
2413 __ Msub(res, mul_left, mul_right, accumulator);
2414 }
Alexandre Rames418318f2015-11-20 15:55:47 +00002415 }
2416}
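// Illustrative sketch (assumption): the merged multiply-accumulate forms each
// map to a single A64 instruction:
//   madd w0, w1, w2, w3   // w0 = w3 + w1 * w2
//   msub w0, w1, w2, w3   // w0 = w3 - w1 * w2
//   mneg w0, w1, w2       // w0 = -(w1 * w2), when the accumulator is zero
// On cores needing the Cortex-A53 835769 fixup, a nop separates a 64-bit
// madd/msub from an immediately preceding load or store.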
2417
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002418void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002419 bool object_array_get_with_read_barrier =
2420 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002421 LocationSummary* locations =
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002422 new (GetGraph()->GetArena()) LocationSummary(instruction,
2423 object_array_get_with_read_barrier ?
2424 LocationSummary::kCallOnSlowPath :
2425 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01002426 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002427 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01002428 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002429 locations->SetInAt(0, Location::RequiresRegister());
2430 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002431 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2432 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2433 } else {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002434 // The output overlaps in the case of an object array get with
2435 // read barriers enabled: we do not want the move to overwrite the
2436 // array's location, as we need it to emit the read barrier.
2437 locations->SetOut(
2438 Location::RequiresRegister(),
2439 object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01002440 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002441}
2442
2443void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002444 Primitive::Type type = instruction->GetType();
2445 Register obj = InputRegisterAt(instruction, 0);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002446 LocationSummary* locations = instruction->GetLocations();
2447 Location index = locations->InAt(1);
Roland Levillain44015862016-01-22 11:47:17 +00002448 Location out = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002449 uint32_t offset = CodeGenerator::GetArrayDataOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002450 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2451 instruction->IsStringCharAt();
Alexandre Ramesd921d642015-04-16 15:07:16 +01002452 MacroAssembler* masm = GetVIXLAssembler();
2453 UseScratchRegisterScope temps(masm);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002454
Roland Levillain19c54192016-11-04 13:44:09 +00002455 // The read barrier instrumentation of object ArrayGet instructions
2456 // does not support the HIntermediateAddress instruction.
2457 DCHECK(!((type == Primitive::kPrimNot) &&
2458 instruction->GetArray()->IsIntermediateAddress() &&
2459 kEmitCompilerReadBarrier));
2460
Roland Levillain44015862016-01-22 11:47:17 +00002461 if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2462 // Object ArrayGet with Baker's read barrier case.
2463 Register temp = temps.AcquireW();
Roland Levillain44015862016-01-22 11:47:17 +00002464 // Note that a potential implicit null check is handled in the
2465 // CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier call.
2466 codegen_->GenerateArrayLoadWithBakerReadBarrier(
2467 instruction, out, obj.W(), offset, index, temp, /* needs_null_check */ true);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002468 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002469 // General case.
2470 MemOperand source = HeapOperand(obj);
jessicahandojo05765752016-09-09 19:01:32 -07002471 Register length;
2472 if (maybe_compressed_char_at) {
2473 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2474 length = temps.AcquireW();
Artem Serov914d7a82017-02-07 14:33:49 +00002475 {
2476 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2477 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2478
2479 if (instruction->GetArray()->IsIntermediateAddress()) {
2480 DCHECK_LT(count_offset, offset);
2481 int64_t adjusted_offset =
2482 static_cast<int64_t>(count_offset) - static_cast<int64_t>(offset);
2483 // Note that `adjusted_offset` is negative, so this will be an LDUR.
2484 __ Ldr(length, MemOperand(obj.X(), adjusted_offset));
2485 } else {
2486 __ Ldr(length, HeapOperand(obj, count_offset));
2487 }
2488 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002489 }
jessicahandojo05765752016-09-09 19:01:32 -07002490 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002491 if (index.IsConstant()) {
jessicahandojo05765752016-09-09 19:01:32 -07002492 if (maybe_compressed_char_at) {
2493 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002494 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2495 "Expecting 0=compressed, 1=uncompressed");
2496 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002497 __ Ldrb(Register(OutputCPURegister(instruction)),
2498 HeapOperand(obj, offset + Int64ConstantFrom(index)));
2499 __ B(&done);
2500 __ Bind(&uncompressed_load);
2501 __ Ldrh(Register(OutputCPURegister(instruction)),
2502 HeapOperand(obj, offset + (Int64ConstantFrom(index) << 1)));
2503 __ Bind(&done);
2504 } else {
2505 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
2506 source = HeapOperand(obj, offset);
2507 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002508 } else {
Roland Levillain44015862016-01-22 11:47:17 +00002509 Register temp = temps.AcquireSameSizeAs(obj);
Artem Serov328429f2016-07-06 16:23:04 +01002510 if (instruction->GetArray()->IsIntermediateAddress()) {
Roland Levillain44015862016-01-22 11:47:17 +00002511 // We do not need to compute the intermediate address from the array: the
2512 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002513 // `TryExtractArrayAccessAddress()`.
Roland Levillain44015862016-01-22 11:47:17 +00002514 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002515 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Roland Levillain44015862016-01-22 11:47:17 +00002516 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2517 }
2518 temp = obj;
2519 } else {
2520 __ Add(temp, obj, offset);
2521 }
jessicahandojo05765752016-09-09 19:01:32 -07002522 if (maybe_compressed_char_at) {
2523 vixl::aarch64::Label uncompressed_load, done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002524 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2525 "Expecting 0=compressed, 1=uncompressed");
2526 __ Tbnz(length.W(), 0, &uncompressed_load);
jessicahandojo05765752016-09-09 19:01:32 -07002527 __ Ldrb(Register(OutputCPURegister(instruction)),
2528 HeapOperand(temp, XRegisterFrom(index), LSL, 0));
2529 __ B(&done);
2530 __ Bind(&uncompressed_load);
2531 __ Ldrh(Register(OutputCPURegister(instruction)),
2532 HeapOperand(temp, XRegisterFrom(index), LSL, 1));
2533 __ Bind(&done);
2534 } else {
2535 source = HeapOperand(temp, XRegisterFrom(index), LSL, Primitive::ComponentSizeShift(type));
2536 }
Roland Levillain44015862016-01-22 11:47:17 +00002537 }
jessicahandojo05765752016-09-09 19:01:32 -07002538 if (!maybe_compressed_char_at) {
Artem Serov914d7a82017-02-07 14:33:49 +00002539 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2540 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
jessicahandojo05765752016-09-09 19:01:32 -07002541 codegen_->Load(type, OutputCPURegister(instruction), source);
2542 codegen_->MaybeRecordImplicitNullCheck(instruction);
2543 }
Roland Levillain44015862016-01-22 11:47:17 +00002544
2545 if (type == Primitive::kPrimNot) {
2546 static_assert(
2547 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2548 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2549 Location obj_loc = locations->InAt(0);
2550 if (index.IsConstant()) {
2551 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset);
2552 } else {
2553 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, obj_loc, offset, index);
2554 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002555 }
Roland Levillain4d027112015-07-01 15:41:14 +01002556 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002557}
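// Illustrative sketch (assumption): a compressed-string charAt tests bit 0 of
// the count field (0 = compressed per the static_assert above) and picks the
// element width accordingly:
//   tbnz w16, #0, uncompressed
//   ldrb w0, [x17, x2]           // compressed: 8-bit chars
//   b    done
// uncompressed:
//   ldrh w0, [x17, x2, lsl #1]   // uncompressed: 16-bit chars
// done: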
2558
Alexandre Rames5319def2014-10-23 10:03:10 +01002559void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
2560 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2561 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002562 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002563}
2564
2565void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markodce016e2016-04-28 13:10:02 +01002566 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
jessicahandojo05765752016-09-09 19:01:32 -07002567 vixl::aarch64::Register out = OutputRegister(instruction);
Artem Serov914d7a82017-02-07 14:33:49 +00002568 {
2569 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2570 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2571 __ Ldr(out, HeapOperand(InputRegisterAt(instruction, 0), offset));
2572 codegen_->MaybeRecordImplicitNullCheck(instruction);
2573 }
jessicahandojo05765752016-09-09 19:01:32 -07002574 // Mask out compression flag from String's array length.
2575 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01002576 __ Lsr(out.W(), out.W(), 1u);
jessicahandojo05765752016-09-09 19:01:32 -07002577 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002578}
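// Illustrative sketch (assumption): with string compression, the count field
// holds (length << 1) | compression_flag, which is why String.length() needs
// the extra Lsr above:
//   ldr w0, [x1, #8]    // load count (offset illustrative)
//   lsr w0, w0, #1      // drop the compression bit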
2579
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002580void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002581 Primitive::Type value_type = instruction->GetComponentType();
2582
2583 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002584 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2585 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01002586 may_need_runtime_call_for_type_check ?
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002587 LocationSummary::kCallOnSlowPath :
2588 LocationSummary::kNoCall);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002589 locations->SetInAt(0, Location::RequiresRegister());
2590 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002591 if (IsConstantZeroBitPattern(instruction->InputAt(2))) {
2592 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2593 } else if (Primitive::IsFloatingPointType(value_type)) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002594 locations->SetInAt(2, Location::RequiresFpuRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002595 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002596 locations->SetInAt(2, Location::RequiresRegister());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002597 }
2598}
2599
2600void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
2601 Primitive::Type value_type = instruction->GetComponentType();
Alexandre Rames97833a02015-04-16 15:07:12 +01002602 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002603 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002604 bool needs_write_barrier =
2605 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Alexandre Rames97833a02015-04-16 15:07:12 +01002606
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002607 Register array = InputRegisterAt(instruction, 0);
Alexandre Ramesbe919d92016-08-23 18:33:36 +01002608 CPURegister value = InputCPURegisterOrZeroRegAt(instruction, 2);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002609 CPURegister source = value;
2610 Location index = locations->InAt(1);
2611 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
2612 MemOperand destination = HeapOperand(array);
2613 MacroAssembler* masm = GetVIXLAssembler();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002614
2615 if (!needs_write_barrier) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002616 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002617 if (index.IsConstant()) {
2618 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
2619 destination = HeapOperand(array, offset);
2620 } else {
2621 UseScratchRegisterScope temps(masm);
2622 Register temp = temps.AcquireSameSizeAs(array);
Artem Serov328429f2016-07-06 16:23:04 +01002623 if (instruction->GetArray()->IsIntermediateAddress()) {
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002624 // We do not need to compute the intermediate address from the array: the
2625 // input instruction has done it already. See the comment in
Artem Serov328429f2016-07-06 16:23:04 +01002626 // `TryExtractArrayAccessAddress()`.
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002627 if (kIsDebugBuild) {
Artem Serov328429f2016-07-06 16:23:04 +01002628 HIntermediateAddress* tmp = instruction->GetArray()->AsIntermediateAddress();
Alexandre Ramese6dbf482015-10-19 10:10:41 +01002629 DCHECK_EQ(tmp->GetOffset()->AsIntConstant()->GetValueAsUint64(), offset);
2630 }
2631 temp = array;
2632 } else {
2633 __ Add(temp, array, offset);
2634 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002635 destination = HeapOperand(temp,
2636 XRegisterFrom(index),
2637 LSL,
2638 Primitive::ComponentSizeShift(value_type));
2639 }
Artem Serov914d7a82017-02-07 14:33:49 +00002640 {
2641 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2642 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2643 codegen_->Store(value_type, value, destination);
2644 codegen_->MaybeRecordImplicitNullCheck(instruction);
2645 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002646 } else {
Artem Serov328429f2016-07-06 16:23:04 +01002647 DCHECK(!instruction->GetArray()->IsIntermediateAddress());
Scott Wakeling97c72b72016-06-24 16:19:36 +01002648 vixl::aarch64::Label done;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002649 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames97833a02015-04-16 15:07:12 +01002650 {
2651 // We use a block to end the scratch scope before the write barrier, thus
2652 // freeing the temporary registers so they can be used in `MarkGCCard`.
2653 UseScratchRegisterScope temps(masm);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002654 Register temp = temps.AcquireSameSizeAs(array);
Alexandre Rames97833a02015-04-16 15:07:12 +01002655 if (index.IsConstant()) {
2656 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002657 destination = HeapOperand(array, offset);
Alexandre Rames97833a02015-04-16 15:07:12 +01002658 } else {
Alexandre Rames82000b02015-07-07 11:34:16 +01002659 destination = HeapOperand(temp,
2660 XRegisterFrom(index),
2661 LSL,
2662 Primitive::ComponentSizeShift(value_type));
Alexandre Rames97833a02015-04-16 15:07:12 +01002663 }
2664
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002665 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2666 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2667 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2668
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002669 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002670 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathARM64(instruction);
2671 codegen_->AddSlowPath(slow_path);
2672 if (instruction->GetValueCanBeNull()) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01002673 vixl::aarch64::Label non_zero;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002674 __ Cbnz(Register(value), &non_zero);
2675 if (!index.IsConstant()) {
2676 __ Add(temp, array, offset);
2677 }
Artem Serov914d7a82017-02-07 14:33:49 +00002678 {
2679 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools
2680 // emitted.
2681 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2682 __ Str(wzr, destination);
2683 codegen_->MaybeRecordImplicitNullCheck(instruction);
2684 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002685 __ B(&done);
2686 __ Bind(&non_zero);
2687 }
2688
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002689 // Note that when Baker read barriers are enabled, the type
2690 // checks are performed without read barriers. This is fine,
2691 // even in the case where a class object is in the from-space
2692 // after the flip, as a comparison involving such a type would
2693 // not produce a false positive; it may of course produce a
2694 // false negative, in which case we would take the ArraySet
2695 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01002696
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002697 Register temp2 = temps.AcquireSameSizeAs(array);
2698 // /* HeapReference<Class> */ temp = array->klass_
Artem Serov914d7a82017-02-07 14:33:49 +00002699 {
2700 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
2701 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2702 __ Ldr(temp, HeapOperand(array, class_offset));
2703 codegen_->MaybeRecordImplicitNullCheck(instruction);
2704 }
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002705 GetAssembler()->MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01002706
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002707 // /* HeapReference<Class> */ temp = temp->component_type_
2708 __ Ldr(temp, HeapOperand(temp, component_offset));
2709 // /* HeapReference<Class> */ temp2 = value->klass_
2710 __ Ldr(temp2, HeapOperand(Register(value), class_offset));
2711 // If heap poisoning is enabled, no need to unpoison `temp`
2712 // nor `temp2`, as we are comparing two poisoned references.
2713 __ Cmp(temp, temp2);
2714 temps.Release(temp2);
Roland Levillain16d9f942016-08-25 17:27:56 +01002715
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002716 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2717 vixl::aarch64::Label do_put;
2718 __ B(eq, &do_put);
2719 // If heap poisoning is enabled, the `temp` reference has
2720 // not been unpoisoned yet; unpoison it now.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00002721 GetAssembler()->MaybeUnpoisonHeapReference(temp);
2722
Roland Levillain9d6e1f82016-09-05 15:57:33 +01002723 // /* HeapReference<Class> */ temp = temp->super_class_
2724 __ Ldr(temp, HeapOperand(temp, super_offset));
2725 // If heap poisoning is enabled, no need to unpoison
2726 // `temp`, as we are comparing against null below.
2727 __ Cbnz(temp, slow_path->GetEntryLabel());
2728 __ Bind(&do_put);
2729 } else {
2730 __ B(ne, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002731 }
2732 }
2733
2734 if (kPoisonHeapReferences) {
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002735 Register temp2 = temps.AcquireSameSizeAs(array);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002736 DCHECK(value.IsW());
Nicolas Geoffraya8a0fe22015-10-01 15:50:27 +01002737 __ Mov(temp2, value.W());
2738 GetAssembler()->PoisonHeapReference(temp2);
2739 source = temp2;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002740 }
2741
2742 if (!index.IsConstant()) {
2743 __ Add(temp, array, offset);
2744 }
Artem Serov914d7a82017-02-07 14:33:49 +00002745 {
2746 // Ensure that between store and MaybeRecordImplicitNullCheck there are no pools emitted.
2747 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
2748 __ Str(source, destination);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002749
Artem Serov914d7a82017-02-07 14:33:49 +00002750 if (!may_need_runtime_call_for_type_check) {
2751 codegen_->MaybeRecordImplicitNullCheck(instruction);
2752 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002753 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002754 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01002755
2756 codegen_->MarkGCCard(array, value.W(), instruction->GetValueCanBeNull());
2757
2758 if (done.IsLinked()) {
2759 __ Bind(&done);
2760 }
2761
2762 if (slow_path != nullptr) {
2763 __ Bind(slow_path->GetExitLabel());
Alexandre Rames97833a02015-04-16 15:07:12 +01002764 }
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002765 }
2766}
2767
Alexandre Rames67555f72014-11-18 10:55:16 +00002768void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002769 RegisterSet caller_saves = RegisterSet::Empty();
2770 InvokeRuntimeCallingConvention calling_convention;
2771 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
2772 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1).GetCode()));
2773 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexandre Rames67555f72014-11-18 10:55:16 +00002774 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu760d8ef2015-03-28 18:09:56 +00002775 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
Alexandre Rames67555f72014-11-18 10:55:16 +00002776}
2777
2778void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002779 BoundsCheckSlowPathARM64* slow_path =
2780 new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00002781 codegen_->AddSlowPath(slow_path);
Alexandre Rames67555f72014-11-18 10:55:16 +00002782 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
2783 __ B(slow_path->GetEntryLabel(), hs);
2784}
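// Illustrative sketch (assumption): one unsigned comparison covers both bounds,
// since a negative index reinterpreted as unsigned compares above any length:
//   cmp  w0, w1                   // index vs length
//   b.hs <BoundsCheckSlowPath>    // taken when index >= length (unsigned)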
2785
Alexandre Rames67555f72014-11-18 10:55:16 +00002786void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
2787 LocationSummary* locations =
2788 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2789 locations->SetInAt(0, Location::RequiresRegister());
2790 if (check->HasUses()) {
2791 locations->SetOut(Location::SameAsFirstInput());
2792 }
2793}
2794
2795void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
2796 // We assume the class is not null.
2797 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2798 check->GetLoadClass(), check, check->GetDexPc(), true);
2799 codegen_->AddSlowPath(slow_path);
2800 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
2801}
2802
Roland Levillain1a653882016-03-18 18:05:57 +00002803static bool IsFloatingPointZeroConstant(HInstruction* inst) {
2804 return (inst->IsFloatConstant() && (inst->AsFloatConstant()->IsArithmeticZero()))
2805 || (inst->IsDoubleConstant() && (inst->AsDoubleConstant()->IsArithmeticZero()));
2806}
2807
2808void InstructionCodeGeneratorARM64::GenerateFcmp(HInstruction* instruction) {
2809 FPRegister lhs_reg = InputFPRegisterAt(instruction, 0);
2810 Location rhs_loc = instruction->GetLocations()->InAt(1);
2811 if (rhs_loc.IsConstant()) {
2812 // 0.0 is the only immediate that can be encoded directly in
2813 // an FCMP instruction.
2814 //
2815 // Both the JLS (section 15.20.1) and the JVMS (section 6.5)
2816 // specify that in a floating-point comparison, positive zero
2817 // and negative zero are considered equal, so we can use the
2818 // literal 0.0 for both cases here.
2819 //
2820 // Note however that some methods (Float.equal, Float.compare,
2821 // Float.compareTo, Double.equal, Double.compare,
2822 // Double.compareTo, Math.max, Math.min, StrictMath.max,
2823 // StrictMath.min) consider 0.0 to be (strictly) greater than
2824 // -0.0. So if we ever translate calls to these methods into a
2825 // HCompare instruction, we must handle the -0.0 case with
2826 // care here.
2827 DCHECK(IsFloatingPointZeroConstant(rhs_loc.GetConstant()));
2828 __ Fcmp(lhs_reg, 0.0);
2829 } else {
2830 __ Fcmp(lhs_reg, InputFPRegisterAt(instruction, 1));
2831 }
Roland Levillain7f63c522015-07-13 15:54:55 +00002832}
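// Illustrative sketch (assumption): comparing against a 0.0 constant avoids
// materializing the constant in a register:
//   fcmp s0, #0.0   // constant right-hand side
//   fcmp s0, s1     // general case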
2833
Serban Constantinescu02164b32014-11-13 14:05:07 +00002834void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002835 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00002836 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2837 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01002838 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002839 case Primitive::kPrimBoolean:
2840 case Primitive::kPrimByte:
2841 case Primitive::kPrimShort:
2842 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002843 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002844 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002845 locations->SetInAt(0, Location::RequiresRegister());
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00002846 locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002847 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2848 break;
2849 }
2850 case Primitive::kPrimFloat:
2851 case Primitive::kPrimDouble: {
2852 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain7f63c522015-07-13 15:54:55 +00002853 locations->SetInAt(1,
2854 IsFloatingPointZeroConstant(compare->InputAt(1))
2855 ? Location::ConstantLocation(compare->InputAt(1)->AsConstant())
2856 : Location::RequiresFpuRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002857 locations->SetOut(Location::RequiresRegister());
2858 break;
2859 }
2860 default:
2861 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2862 }
2863}
2864
2865void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
2866 Primitive::Type in_type = compare->InputAt(0)->GetType();
2867
2868 // 0 if: left == right
2869 // 1 if: left > right
2870 // -1 if: left < right
2871 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002872 case Primitive::kPrimBoolean:
2873 case Primitive::kPrimByte:
2874 case Primitive::kPrimShort:
2875 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002876 case Primitive::kPrimInt:
Serban Constantinescu02164b32014-11-13 14:05:07 +00002877 case Primitive::kPrimLong: {
2878 Register result = OutputRegister(compare);
2879 Register left = InputRegisterAt(compare, 0);
2880 Operand right = InputOperandAt(compare, 1);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002881 __ Cmp(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08002882 __ Cset(result, ne); // result == +1 if NE or 0 otherwise
2883 __ Cneg(result, result, lt); // result == -1 if LT or unchanged otherwise
Serban Constantinescu02164b32014-11-13 14:05:07 +00002884 break;
2885 }
2886 case Primitive::kPrimFloat:
2887 case Primitive::kPrimDouble: {
2888 Register result = OutputRegister(compare);
Roland Levillain1a653882016-03-18 18:05:57 +00002889 GenerateFcmp(compare);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002890 __ Cset(result, ne);
2891 __ Cneg(result, result, ARM64FPCondition(kCondLT, compare->IsGtBias()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002892 break;
2893 }
2894 default:
2895 LOG(FATAL) << "Unimplemented compare type " << in_type;
2896 }
2897}
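// Illustrative sketch (assumption): the integral path above produces the
// canonical -1/0/+1 result in three instructions:
//   cmp  w1, w2
//   cset w0, ne        // w0 = (w1 != w2) ? 1 : 0
//   cneg w0, w0, lt    // flip to -1 when w1 < w2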
2898
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002899void LocationsBuilderARM64::HandleCondition(HCondition* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002900 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Roland Levillain7f63c522015-07-13 15:54:55 +00002901
2902 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
2903 locations->SetInAt(0, Location::RequiresFpuRegister());
2904 locations->SetInAt(1,
2905 IsFloatingPointZeroConstant(instruction->InputAt(1))
2906 ? Location::ConstantLocation(instruction->InputAt(1)->AsConstant())
2907 : Location::RequiresFpuRegister());
2908 } else {
2909 // Integer cases.
2910 locations->SetInAt(0, Location::RequiresRegister());
2911 locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
2912 }
2913
David Brazdilb3e773e2016-01-26 11:28:37 +00002914 if (!instruction->IsEmittedAtUseSite()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002915 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002916 }
2917}
2918
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002919void InstructionCodeGeneratorARM64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002920 if (instruction->IsEmittedAtUseSite()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002921 return;
2922 }
2923
2924 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames5319def2014-10-23 10:03:10 +01002925 Register res = RegisterFrom(locations->Out(), instruction->GetType());
Roland Levillain7f63c522015-07-13 15:54:55 +00002926 IfCondition if_cond = instruction->GetCondition();
Alexandre Rames5319def2014-10-23 10:03:10 +01002927
Roland Levillain7f63c522015-07-13 15:54:55 +00002928 if (Primitive::IsFloatingPointType(instruction->InputAt(0)->GetType())) {
Roland Levillain1a653882016-03-18 18:05:57 +00002929 GenerateFcmp(instruction);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002930 __ Cset(res, ARM64FPCondition(if_cond, instruction->IsGtBias()));
Roland Levillain7f63c522015-07-13 15:54:55 +00002931 } else {
2932 // Integer cases.
2933 Register lhs = InputRegisterAt(instruction, 0);
2934 Operand rhs = InputOperandAt(instruction, 1);
2935 __ Cmp(lhs, rhs);
Vladimir Markod6e069b2016-01-18 11:11:01 +00002936 __ Cset(res, ARM64Condition(if_cond));
Roland Levillain7f63c522015-07-13 15:54:55 +00002937 }
Alexandre Rames5319def2014-10-23 10:03:10 +01002938}
2939
2940#define FOR_EACH_CONDITION_INSTRUCTION(M) \
2941 M(Equal) \
2942 M(NotEqual) \
2943 M(LessThan) \
2944 M(LessThanOrEqual) \
2945 M(GreaterThan) \
Aart Bike9f37602015-10-09 11:15:55 -07002946 M(GreaterThanOrEqual) \
2947 M(Below) \
2948 M(BelowOrEqual) \
2949 M(Above) \
2950 M(AboveOrEqual)
Alexandre Rames5319def2014-10-23 10:03:10 +01002951#define DEFINE_CONDITION_VISITORS(Name) \
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002952void LocationsBuilderARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); } \
2953void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { HandleCondition(comp); }
Alexandre Rames5319def2014-10-23 10:03:10 +01002954FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00002955#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01002956#undef FOR_EACH_CONDITION_INSTRUCTION
2957
Zheng Xuc6667102015-05-15 16:08:45 +08002958void InstructionCodeGeneratorARM64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2959 DCHECK(instruction->IsDiv() || instruction->IsRem());
2960
2961 LocationSummary* locations = instruction->GetLocations();
2962 Location second = locations->InAt(1);
2963 DCHECK(second.IsConstant());
2964
2965 Register out = OutputRegister(instruction);
2966 Register dividend = InputRegisterAt(instruction, 0);
2967 int64_t imm = Int64FromConstant(second.GetConstant());
2968 DCHECK(imm == 1 || imm == -1);
2969
2970 if (instruction->IsRem()) {
2971 __ Mov(out, 0);
2972 } else {
2973 if (imm == 1) {
2974 __ Mov(out, dividend);
2975 } else {
2976 __ Neg(out, dividend);
2977 }
2978 }
2979}
2980
2981void InstructionCodeGeneratorARM64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2982 DCHECK(instruction->IsDiv() || instruction->IsRem());
2983
2984 LocationSummary* locations = instruction->GetLocations();
2985 Location second = locations->InAt(1);
2986 DCHECK(second.IsConstant());
2987
2988 Register out = OutputRegister(instruction);
2989 Register dividend = InputRegisterAt(instruction, 0);
2990 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002991 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Zheng Xuc6667102015-05-15 16:08:45 +08002992 int ctz_imm = CTZ(abs_imm);
2993
2994 UseScratchRegisterScope temps(GetVIXLAssembler());
2995 Register temp = temps.AcquireSameSizeAs(out);
2996
2997 if (instruction->IsDiv()) {
2998 __ Add(temp, dividend, abs_imm - 1);
2999 __ Cmp(dividend, 0);
3000 __ Csel(out, temp, dividend, lt);
3001 if (imm > 0) {
3002 __ Asr(out, out, ctz_imm);
3003 } else {
3004 __ Neg(out, Operand(out, ASR, ctz_imm));
3005 }
3006 } else {
3007 int bits = instruction->GetResultType() == Primitive::kPrimInt ? 32 : 64;
3008 __ Asr(temp, dividend, bits - 1);
3009 __ Lsr(temp, temp, bits - ctz_imm);
3010 __ Add(out, dividend, temp);
3011 __ And(out, out, abs_imm - 1);
3012 __ Sub(out, out, temp);
3013 }
3014}
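// Illustrative worked example (assumption): for `int q = n / 8` the division
// path above adds abs_imm - 1 = 7 to negative dividends so the arithmetic
// shift truncates toward zero, as Java requires:
//   add  w16, w0, #7
//   cmp  w0, #0
//   csel w16, w16, w0, lt   // use n + 7 only when n < 0
//   asr  w0, w16, #3
// E.g. n = -9 gives (-9 + 7) >> 3 = -1 rather than the floor value -2.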
3015
3016void InstructionCodeGeneratorARM64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3017 DCHECK(instruction->IsDiv() || instruction->IsRem());
3018
3019 LocationSummary* locations = instruction->GetLocations();
3020 Location second = locations->InAt(1);
3021 DCHECK(second.IsConstant());
3022
3023 Register out = OutputRegister(instruction);
3024 Register dividend = InputRegisterAt(instruction, 0);
3025 int64_t imm = Int64FromConstant(second.GetConstant());
3026
3027 Primitive::Type type = instruction->GetResultType();
3028 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3029
3030 int64_t magic;
3031 int shift;
3032 CalculateMagicAndShiftForDivRem(imm, type == Primitive::kPrimLong /* is_long */, &magic, &shift);
3033
3034 UseScratchRegisterScope temps(GetVIXLAssembler());
3035 Register temp = temps.AcquireSameSizeAs(out);
3036
3037 // temp = get_high(dividend * magic)
3038 __ Mov(temp, magic);
3039 if (type == Primitive::kPrimLong) {
3040 __ Smulh(temp, dividend, temp);
3041 } else {
3042 __ Smull(temp.X(), dividend, temp);
3043 __ Lsr(temp.X(), temp.X(), 32);
3044 }
3045
3046 if (imm > 0 && magic < 0) {
3047 __ Add(temp, temp, dividend);
3048 } else if (imm < 0 && magic > 0) {
3049 __ Sub(temp, temp, dividend);
3050 }
3051
3052 if (shift != 0) {
3053 __ Asr(temp, temp, shift);
3054 }
3055
3056 if (instruction->IsDiv()) {
3057 __ Sub(out, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
3058 } else {
3059 __ Sub(temp, temp, Operand(temp, ASR, type == Primitive::kPrimLong ? 63 : 31));
3060 // TODO: Strength reduction for msub.
3061 Register temp_imm = temps.AcquireSameSizeAs(out);
3062 __ Mov(temp_imm, imm);
3063 __ Msub(out, temp, temp_imm, dividend);
3064 }
3065}
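// Illustrative worked example (assumption): for `int q = n / 3`,
// CalculateMagicAndShiftForDivRem yields magic = 0x55555556 and shift = 0, so
// the emitted sequence is roughly:
//   mov   w16, #0x55555556
//   smull x16, w0, w16           // 64-bit product dividend * magic
//   lsr   x16, x16, #32          // temp = high 32 bits of the product
//   sub   w0, w16, w16, asr #31  // q = temp - (temp >> 31), sign fix-up
// E.g. n = 7 gives temp = 2, q = 2; n = -7 gives temp = -3, q = -2.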
3066
3067void InstructionCodeGeneratorARM64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3068 DCHECK(instruction->IsDiv() || instruction->IsRem());
3069 Primitive::Type type = instruction->GetResultType();
3070 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
3071
3072 LocationSummary* locations = instruction->GetLocations();
3073 Register out = OutputRegister(instruction);
3074 Location second = locations->InAt(1);
3075
3076 if (second.IsConstant()) {
3077 int64_t imm = Int64FromConstant(second.GetConstant());
3078
3079 if (imm == 0) {
3080 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3081 } else if (imm == 1 || imm == -1) {
3082 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003083 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Zheng Xuc6667102015-05-15 16:08:45 +08003084 DivRemByPowerOfTwo(instruction);
3085 } else {
3086 DCHECK(imm <= -2 || imm >= 2);
3087 GenerateDivRemWithAnyConstant(instruction);
3088 }
3089 } else {
3090 Register dividend = InputRegisterAt(instruction, 0);
3091 Register divisor = InputRegisterAt(instruction, 1);
3092 if (instruction->IsDiv()) {
3093 __ Sdiv(out, dividend, divisor);
3094 } else {
3095 UseScratchRegisterScope temps(GetVIXLAssembler());
3096 Register temp = temps.AcquireSameSizeAs(out);
3097 __ Sdiv(temp, dividend, divisor);
3098 __ Msub(out, temp, divisor, dividend);
3099 }
3100 }
3101}
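// Illustrative sketch (assumption): A64 has no integer remainder instruction,
// so a register-divisor Rem is derived from the quotient:
//   sdiv w16, w0, w1      // temp = dividend / divisor
//   msub w0, w16, w1, w0  // out = dividend - temp * divisor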
3102
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003103void LocationsBuilderARM64::VisitDiv(HDiv* div) {
3104 LocationSummary* locations =
3105 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3106 switch (div->GetResultType()) {
3107 case Primitive::kPrimInt:
3108 case Primitive::kPrimLong:
3109 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08003110 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003111 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3112 break;
3113
3114 case Primitive::kPrimFloat:
3115 case Primitive::kPrimDouble:
3116 locations->SetInAt(0, Location::RequiresFpuRegister());
3117 locations->SetInAt(1, Location::RequiresFpuRegister());
3118 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3119 break;
3120
3121 default:
3122 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3123 }
3124}
3125
3126void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
3127 Primitive::Type type = div->GetResultType();
3128 switch (type) {
3129 case Primitive::kPrimInt:
3130 case Primitive::kPrimLong:
Zheng Xuc6667102015-05-15 16:08:45 +08003131 GenerateDivRemIntegral(div);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00003132 break;
3133
3134 case Primitive::kPrimFloat:
3135 case Primitive::kPrimDouble:
3136 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
3137 break;
3138
3139 default:
3140 LOG(FATAL) << "Unexpected div type " << type;
3141 }
3142}
3143
Alexandre Rames67555f72014-11-18 10:55:16 +00003144void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003145 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00003146 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00003147}
3148
3149void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3150 SlowPathCodeARM64* slow_path =
3151 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
3152 codegen_->AddSlowPath(slow_path);
3153 Location value = instruction->GetLocations()->InAt(0);
3154
Alexandre Rames3e69f162014-12-10 10:36:50 +00003155 Primitive::Type type = instruction->GetType();
3156
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003157 if (!Primitive::IsIntegralType(type)) {
3158 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Alexandre Rames3e69f162014-12-10 10:36:50 +00003159 return;
3160 }
3161
Alexandre Rames67555f72014-11-18 10:55:16 +00003162 if (value.IsConstant()) {
3163 int64_t divisor = Int64ConstantFrom(value);
3164 if (divisor == 0) {
3165 __ B(slow_path->GetEntryLabel());
3166 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00003167 // A division by a non-zero constant is valid. We don't need to perform
3168 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00003169 }
3170 } else {
3171 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
3172 }
3173}
3174
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003175void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
3176 LocationSummary* locations =
3177 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3178 locations->SetOut(Location::ConstantLocation(constant));
3179}
3180
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003181void InstructionCodeGeneratorARM64::VisitDoubleConstant(
3182 HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003183 // Will be generated at use site.
3184}
3185
Alexandre Rames5319def2014-10-23 10:03:10 +01003186void LocationsBuilderARM64::VisitExit(HExit* exit) {
3187 exit->SetLocations(nullptr);
3188}
3189
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003190void InstructionCodeGeneratorARM64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003191}
3192
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003193void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
3194 LocationSummary* locations =
3195 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3196 locations->SetOut(Location::ConstantLocation(constant));
3197}
3198
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01003199void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00003200 // Will be generated at use site.
3201}
3202
David Brazdilfc6a86a2015-06-26 10:33:45 +00003203void InstructionCodeGeneratorARM64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003204 DCHECK(!successor->IsExitBlock());
3205 HBasicBlock* block = got->GetBlock();
3206 HInstruction* previous = got->GetPrevious();
3207 HLoopInformation* info = block->GetLoopInformation();
3208
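  // On a back edge with a pending suspend check, emit the suspend check here
  // (it branches to the successor itself) instead of a plain jump.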
David Brazdil46e2a392015-03-16 17:31:52 +00003209 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00003210 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3211 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3212 return;
3213 }
3214 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3215 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3216 }
3217 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003218 __ B(codegen_->GetLabelOf(successor));
3219 }
3220}
3221
David Brazdilfc6a86a2015-06-26 10:33:45 +00003222void LocationsBuilderARM64::VisitGoto(HGoto* got) {
3223 got->SetLocations(nullptr);
3224}
3225
3226void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
3227 HandleGoto(got, got->GetSuccessor());
3228}
3229
3230void LocationsBuilderARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3231 try_boundary->SetLocations(nullptr);
3232}
3233
3234void InstructionCodeGeneratorARM64::VisitTryBoundary(HTryBoundary* try_boundary) {
3235 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3236 if (!successor->IsExitBlock()) {
3237 HandleGoto(try_boundary, successor);
3238 }
3239}
3240
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003241void InstructionCodeGeneratorARM64::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00003242 size_t condition_input_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01003243 vixl::aarch64::Label* true_target,
3244 vixl::aarch64::Label* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00003245 // FP branching requires both targets to be explicit. If either of the targets
3246 // is nullptr (fallthrough), use and bind `fallthrough_target` instead.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003247 vixl::aarch64::Label fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003248 HInstruction* cond = instruction->InputAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003249
David Brazdil0debae72015-11-12 18:37:00 +00003250 if (true_target == nullptr && false_target == nullptr) {
3251 // Nothing to do. The code always falls through.
3252 return;
3253 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00003254 // Constant condition, statically compared against "true" (integer value 1).
3255 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00003256 if (true_target != nullptr) {
3257 __ B(true_target);
Serban Constantinescu02164b32014-11-13 14:05:07 +00003258 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003259 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00003260 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00003261 if (false_target != nullptr) {
3262 __ B(false_target);
3263 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00003264 }
David Brazdil0debae72015-11-12 18:37:00 +00003265 return;
3266 }
3267
3268 // The following code generates these patterns:
3269 // (1) true_target == nullptr && false_target != nullptr
3270 // - opposite condition true => branch to false_target
3271 // (2) true_target != nullptr && false_target == nullptr
3272 // - condition true => branch to true_target
3273 // (3) true_target != nullptr && false_target != nullptr
3274 // - condition true => branch to true_target
3275 // - branch to false_target
3276 if (IsBooleanValueOrMaterializedCondition(cond)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003277 // The condition instruction has been materialized, compare the output to 0.
David Brazdil0debae72015-11-12 18:37:00 +00003278 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
Alexandre Rames5319def2014-10-23 10:03:10 +01003279 DCHECK(cond_val.IsRegister());
David Brazdil0debae72015-11-12 18:37:00 +00003280 if (true_target == nullptr) {
3281 __ Cbz(InputRegisterAt(instruction, condition_input_index), false_target);
3282 } else {
3283 __ Cbnz(InputRegisterAt(instruction, condition_input_index), true_target);
3284 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003285 } else {
3286 // The condition instruction has not been materialized, use its inputs as
3287 // the comparison and its condition as the branch condition.
David Brazdil0debae72015-11-12 18:37:00 +00003288 HCondition* condition = cond->AsCondition();
Roland Levillain7f63c522015-07-13 15:54:55 +00003289
David Brazdil0debae72015-11-12 18:37:00 +00003290 Primitive::Type type = condition->InputAt(0)->GetType();
Roland Levillain7f63c522015-07-13 15:54:55 +00003291 if (Primitive::IsFloatingPointType(type)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003292 GenerateFcmp(condition);
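      // ARM64FPCondition also encodes the gt/lt bias, so that comparisons
      // involving NaN branch the way the HIR condition requires.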
David Brazdil0debae72015-11-12 18:37:00 +00003293 if (true_target == nullptr) {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003294 IfCondition opposite_condition = condition->GetOppositeCondition();
3295 __ B(ARM64FPCondition(opposite_condition, condition->IsGtBias()), false_target);
David Brazdil0debae72015-11-12 18:37:00 +00003296 } else {
Vladimir Markod6e069b2016-01-18 11:11:01 +00003297 __ B(ARM64FPCondition(condition->GetCondition(), condition->IsGtBias()), true_target);
David Brazdil0debae72015-11-12 18:37:00 +00003298 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003299 } else {
Roland Levillain7f63c522015-07-13 15:54:55 +00003300 // Integer cases.
3301 Register lhs = InputRegisterAt(condition, 0);
3302 Operand rhs = InputOperandAt(condition, 1);
David Brazdil0debae72015-11-12 18:37:00 +00003303
3304 Condition arm64_cond;
Scott Wakeling97c72b72016-06-24 16:19:36 +01003305 vixl::aarch64::Label* non_fallthrough_target;
David Brazdil0debae72015-11-12 18:37:00 +00003306 if (true_target == nullptr) {
3307 arm64_cond = ARM64Condition(condition->GetOppositeCondition());
3308 non_fallthrough_target = false_target;
3309 } else {
3310 arm64_cond = ARM64Condition(condition->GetCondition());
3311 non_fallthrough_target = true_target;
3312 }
3313
Aart Bik086d27e2016-01-20 17:02:00 -08003314 if ((arm64_cond == eq || arm64_cond == ne || arm64_cond == lt || arm64_cond == ge) &&
Scott Wakeling97c72b72016-06-24 16:19:36 +01003315 rhs.IsImmediate() && (rhs.GetImmediate() == 0)) {
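        // Comparisons against zero fuse into a single compare-and-branch
        // (cbz/cbnz) or test-bit-and-branch (tbz/tbnz) instruction.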
Roland Levillain7f63c522015-07-13 15:54:55 +00003316 switch (arm64_cond) {
3317 case eq:
David Brazdil0debae72015-11-12 18:37:00 +00003318 __ Cbz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003319 break;
3320 case ne:
David Brazdil0debae72015-11-12 18:37:00 +00003321 __ Cbnz(lhs, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003322 break;
3323 case lt:
3324 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003325 __ Tbnz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003326 break;
3327 case ge:
3328 // Test the sign bit and branch accordingly.
David Brazdil0debae72015-11-12 18:37:00 +00003329 __ Tbz(lhs, (lhs.IsX() ? kXRegSize : kWRegSize) - 1, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003330 break;
3331 default:
3332 // Without the `static_cast` the compiler emits an error under
3333 // `-Werror=sign-promo`.
3334 LOG(FATAL) << "Unexpected condition: " << static_cast<int>(arm64_cond);
3335 }
3336 } else {
3337 __ Cmp(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00003338 __ B(arm64_cond, non_fallthrough_target);
Roland Levillain7f63c522015-07-13 15:54:55 +00003339 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003340 }
3341 }
David Brazdil0debae72015-11-12 18:37:00 +00003342
3343 // If neither branch falls through (case 3), the conditional branch to `true_target`
3344 // was already emitted (case 2) and we need to emit a jump to `false_target`.
3345 if (true_target != nullptr && false_target != nullptr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01003346 __ B(false_target);
3347 }
David Brazdil0debae72015-11-12 18:37:00 +00003348
3349 if (fallthrough_target.IsLinked()) {
3350 __ Bind(&fallthrough_target);
3351 }
Alexandre Rames5319def2014-10-23 10:03:10 +01003352}
3353
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003354void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
3355 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00003356 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003357 locations->SetInAt(0, Location::RequiresRegister());
3358 }
3359}
3360
3361void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00003362 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
3363 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Scott Wakeling97c72b72016-06-24 16:19:36 +01003364 vixl::aarch64::Label* true_target = codegen_->GetLabelOf(true_successor);
3365 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor)) {
3366 true_target = nullptr;
3367 }
3368 vixl::aarch64::Label* false_target = codegen_->GetLabelOf(false_successor);
3369 if (codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor)) {
3370 false_target = nullptr;
3371 }
David Brazdil0debae72015-11-12 18:37:00 +00003372 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003373}
3374
3375void LocationsBuilderARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
3376 LocationSummary* locations = new (GetGraph()->GetArena())
3377 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01003378 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00003379 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003380 locations->SetInAt(0, Location::RequiresRegister());
3381 }
3382}
3383
3384void InstructionCodeGeneratorARM64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08003385 SlowPathCodeARM64* slow_path =
3386 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathARM64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00003387 GenerateTestAndBranch(deoptimize,
3388 /* condition_input_index */ 0,
3389 slow_path->GetEntryLabel(),
3390 /* false_target */ nullptr);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07003391}
3392
Mingyao Yang063fc772016-08-02 11:02:54 -07003393void LocationsBuilderARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3394 LocationSummary* locations = new (GetGraph()->GetArena())
3395 LocationSummary(flag, LocationSummary::kNoCall);
3396 locations->SetOut(Location::RequiresRegister());
3397}
3398
3399void InstructionCodeGeneratorARM64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
3400 __ Ldr(OutputRegister(flag),
3401 MemOperand(sp, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
3402}
3403
David Brazdilc0b601b2016-02-08 14:20:45 +00003404static inline bool IsConditionOnFloatingPointValues(HInstruction* condition) {
3405 return condition->IsCondition() &&
3406 Primitive::IsFloatingPointType(condition->InputAt(0)->GetType());
3407}
3408
Alexandre Rames880f1192016-06-13 16:04:50 +01003409static inline Condition GetConditionForSelect(HCondition* condition) {
3410 IfCondition cond = condition->AsCondition()->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003411 return IsConditionOnFloatingPointValues(condition) ? ARM64FPCondition(cond, condition->IsGtBias())
3412 : ARM64Condition(cond);
3413}
3414
David Brazdil74eb1b22015-12-14 11:44:01 +00003415void LocationsBuilderARM64::VisitSelect(HSelect* select) {
3416 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexandre Rames880f1192016-06-13 16:04:50 +01003417 if (Primitive::IsFloatingPointType(select->GetType())) {
3418 locations->SetInAt(0, Location::RequiresFpuRegister());
3419 locations->SetInAt(1, Location::RequiresFpuRegister());
3420 locations->SetOut(Location::RequiresFpuRegister());
3421 } else {
3422 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
3423 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
3424 bool is_true_value_constant = cst_true_value != nullptr;
3425 bool is_false_value_constant = cst_false_value != nullptr;
3426 // Ask VIXL whether we should synthesize constants in registers.
3427 // We give an arbitrary register to VIXL when dealing with non-constant inputs.
3428 Operand true_op = is_true_value_constant ?
3429 Operand(Int64FromConstant(cst_true_value)) : Operand(x1);
3430 Operand false_op = is_false_value_constant ?
3431 Operand(Int64FromConstant(cst_false_value)) : Operand(x2);
3432 bool true_value_in_register = false;
3433 bool false_value_in_register = false;
3434 MacroAssembler::GetCselSynthesisInformation(
3435 x0, true_op, false_op, &true_value_in_register, &false_value_in_register);
3436 true_value_in_register |= !is_true_value_constant;
3437 false_value_in_register |= !is_false_value_constant;
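    // Non-constant inputs always need a register, whatever VIXL reported for
    // the placeholder operands above.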
3438
3439 locations->SetInAt(1, true_value_in_register ? Location::RequiresRegister()
3440 : Location::ConstantLocation(cst_true_value));
3441 locations->SetInAt(0, false_value_in_register ? Location::RequiresRegister()
3442 : Location::ConstantLocation(cst_false_value));
3443 locations->SetOut(Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00003444 }
Alexandre Rames880f1192016-06-13 16:04:50 +01003445
David Brazdil74eb1b22015-12-14 11:44:01 +00003446 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
3447 locations->SetInAt(2, Location::RequiresRegister());
3448 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003449}
3450
3451void InstructionCodeGeneratorARM64::VisitSelect(HSelect* select) {
David Brazdilc0b601b2016-02-08 14:20:45 +00003452 HInstruction* cond = select->GetCondition();
David Brazdilc0b601b2016-02-08 14:20:45 +00003453 Condition csel_cond;
3454
3455 if (IsBooleanValueOrMaterializedCondition(cond)) {
3456 if (cond->IsCondition() && cond->GetNext() == select) {
Alexandre Rames880f1192016-06-13 16:04:50 +01003457 // Use the condition flags set by the previous instruction.
3458 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003459 } else {
3460 __ Cmp(InputRegisterAt(select, 2), 0);
Alexandre Rames880f1192016-06-13 16:04:50 +01003461 csel_cond = ne;
David Brazdilc0b601b2016-02-08 14:20:45 +00003462 }
3463 } else if (IsConditionOnFloatingPointValues(cond)) {
Roland Levillain1a653882016-03-18 18:05:57 +00003464 GenerateFcmp(cond);
Alexandre Rames880f1192016-06-13 16:04:50 +01003465 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003466 } else {
3467 __ Cmp(InputRegisterAt(cond, 0), InputOperandAt(cond, 1));
Alexandre Rames880f1192016-06-13 16:04:50 +01003468 csel_cond = GetConditionForSelect(cond->AsCondition());
David Brazdilc0b601b2016-02-08 14:20:45 +00003469 }
3470
Alexandre Rames880f1192016-06-13 16:04:50 +01003471 if (Primitive::IsFloatingPointType(select->GetType())) {
3472 __ Fcsel(OutputFPRegister(select),
3473 InputFPRegisterAt(select, 1),
3474 InputFPRegisterAt(select, 0),
3475 csel_cond);
3476 } else {
3477 __ Csel(OutputRegister(select),
3478 InputOperandAt(select, 1),
3479 InputOperandAt(select, 0),
3480 csel_cond);
David Brazdilc0b601b2016-02-08 14:20:45 +00003481 }
David Brazdil74eb1b22015-12-14 11:44:01 +00003482}
3483
David Srbecky0cf44932015-12-09 14:09:59 +00003484void LocationsBuilderARM64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
3485 new (GetGraph()->GetArena()) LocationSummary(info);
3486}
3487
David Srbeckyd28f4a02016-03-14 17:14:24 +00003488void InstructionCodeGeneratorARM64::VisitNativeDebugInfo(HNativeDebugInfo*) {
3489 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00003490}
3491
3492void CodeGeneratorARM64::GenerateNop() {
3493 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00003494}
3495
Alexandre Rames5319def2014-10-23 10:03:10 +01003496void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003497 HandleFieldGet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003498}
3499
3500void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003501 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames5319def2014-10-23 10:03:10 +01003502}
3503
3504void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01003505 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01003506}
3507
3508void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01003509 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01003510}
3511
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003512// Temp is used for read barrier.
3513static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3514 if (kEmitCompilerReadBarrier &&
Roland Levillain44015862016-01-22 11:47:17 +00003515 (kUseBakerReadBarrier ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003516 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3517 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3518 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3519 return 1;
3520 }
3521 return 0;
3522}
3523
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003524// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003525// interface pointer, one for loading the current interface.
3526// The other checks have one temp for loading the object's class.
3527static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3528 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
3529 return 3;
3530 }
3531 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain44015862016-01-22 11:47:17 +00003532}
3533
Alexandre Rames67555f72014-11-18 10:55:16 +00003534void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003535 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003536 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01003537 bool baker_read_barrier_slow_path = false;
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003538 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003539 case TypeCheckKind::kExactCheck:
3540 case TypeCheckKind::kAbstractClassCheck:
3541 case TypeCheckKind::kClassHierarchyCheck:
3542 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003543 call_kind =
3544 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01003545 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003546 break;
3547 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003548 case TypeCheckKind::kUnresolvedCheck:
3549 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003550 call_kind = LocationSummary::kCallOnSlowPath;
3551 break;
3552 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003553
Alexandre Rames67555f72014-11-18 10:55:16 +00003554 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01003555 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003556 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01003557 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003558 locations->SetInAt(0, Location::RequiresRegister());
3559 locations->SetInAt(1, Location::RequiresRegister());
3560 // The "out" register is used as a temporary, so it overlaps with the inputs.
3561 // Note that TypeCheckSlowPathARM64 uses this register too.
3562 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003563 // Add temps if necessary for read barriers.
3564 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexandre Rames67555f72014-11-18 10:55:16 +00003565}
3566
3567void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003568 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexandre Rames67555f72014-11-18 10:55:16 +00003569 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003570 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003571 Register obj = InputRegisterAt(instruction, 0);
3572 Register cls = InputRegisterAt(instruction, 1);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003573 Location out_loc = locations->Out();
Alexandre Rames67555f72014-11-18 10:55:16 +00003574 Register out = OutputRegister(instruction);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003575 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
3576 DCHECK_LE(num_temps, 1u);
3577 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003578 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3579 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3580 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3581 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexandre Rames67555f72014-11-18 10:55:16 +00003582
Scott Wakeling97c72b72016-06-24 16:19:36 +01003583 vixl::aarch64::Label done, zero;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003584 SlowPathCodeARM64* slow_path = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00003585
3586 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003587 // Avoid null check if we know `obj` is not null.
3588 if (instruction->MustDoNullCheck()) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003589 __ Cbz(obj, &zero);
3590 }
3591
Roland Levillain44015862016-01-22 11:47:17 +00003592 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003593 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003594 // /* HeapReference<Class> */ out = obj->klass_
3595 GenerateReferenceLoadTwoRegisters(instruction,
3596 out_loc,
3597 obj_loc,
3598 class_offset,
3599 maybe_temp_loc,
3600 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003601 __ Cmp(out, cls);
3602 __ Cset(out, eq);
3603 if (zero.IsLinked()) {
3604 __ B(&done);
3605 }
3606 break;
3607 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003608
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003609 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003610 // /* HeapReference<Class> */ out = obj->klass_
3611 GenerateReferenceLoadTwoRegisters(instruction,
3612 out_loc,
3613 obj_loc,
3614 class_offset,
3615 maybe_temp_loc,
3616 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003617 // If the class is abstract, we eagerly fetch the super class of the
3618 // object to avoid doing a comparison we know will fail.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003619 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003620 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003621 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003622 GenerateReferenceLoadOneRegister(instruction,
3623 out_loc,
3624 super_offset,
3625 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003626 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003627 // If `out` is null, we use it for the result, and jump to `done`.
3628 __ Cbz(out, &done);
3629 __ Cmp(out, cls);
3630 __ B(ne, &loop);
3631 __ Mov(out, 1);
3632 if (zero.IsLinked()) {
3633 __ B(&done);
3634 }
3635 break;
3636 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003637
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003638 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003639 // /* HeapReference<Class> */ out = obj->klass_
3640 GenerateReferenceLoadTwoRegisters(instruction,
3641 out_loc,
3642 obj_loc,
3643 class_offset,
3644 maybe_temp_loc,
3645 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003646 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003647 vixl::aarch64::Label loop, success;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003648 __ Bind(&loop);
3649 __ Cmp(out, cls);
3650 __ B(eq, &success);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003651 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003652 GenerateReferenceLoadOneRegister(instruction,
3653 out_loc,
3654 super_offset,
3655 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003656 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003657 __ Cbnz(out, &loop);
3658 // If `out` is null, we use it for the result, and jump to `done`.
3659 __ B(&done);
3660 __ Bind(&success);
3661 __ Mov(out, 1);
3662 if (zero.IsLinked()) {
3663 __ B(&done);
3664 }
3665 break;
3666 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003667
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003668 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003669 // /* HeapReference<Class> */ out = obj->klass_
3670 GenerateReferenceLoadTwoRegisters(instruction,
3671 out_loc,
3672 obj_loc,
3673 class_offset,
3674 maybe_temp_loc,
3675 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003676 // Do an exact check.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003677 vixl::aarch64::Label exact_check;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003678 __ Cmp(out, cls);
3679 __ B(eq, &exact_check);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003680 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003681 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003682 GenerateReferenceLoadOneRegister(instruction,
3683 out_loc,
3684 component_offset,
3685 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003686 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003687 // If `out` is null, we use it for the result, and jump to `done`.
3688 __ Cbz(out, &done);
3689 __ Ldrh(out, HeapOperand(out, primitive_offset));
3690 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3691 __ Cbnz(out, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003692 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003693 __ Mov(out, 1);
3694 __ B(&done);
3695 break;
3696 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003697
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003698 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08003699 // No read barrier since the slow path will retry upon failure.
3700 // /* HeapReference<Class> */ out = obj->klass_
3701 GenerateReferenceLoadTwoRegisters(instruction,
3702 out_loc,
3703 obj_loc,
3704 class_offset,
3705 maybe_temp_loc,
3706 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003707 __ Cmp(out, cls);
3708 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003709 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3710 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003711 codegen_->AddSlowPath(slow_path);
3712 __ B(ne, slow_path->GetEntryLabel());
3713 __ Mov(out, 1);
3714 if (zero.IsLinked()) {
3715 __ B(&done);
3716 }
3717 break;
3718 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003719
Calin Juravle98893e12015-10-02 21:05:03 +01003720 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003721 case TypeCheckKind::kInterfaceCheck: {
3722 // Note that we indeed only call on slow path, but we always go
3723 // into the slow path for the unresolved and interface check
3724 // cases.
3725 //
3726 // We cannot directly call the InstanceofNonTrivial runtime
3727 // entry point without resorting to a type checking slow path
3728 // here (i.e. by calling InvokeRuntime directly), as it would
3729 // require assigning fixed registers for the inputs of this
3730 // HInstanceOf instruction (following the runtime calling
3731 // convention), which might be cluttered by the potential first
3732 // read barrier emission at the beginning of this method.
Roland Levillain44015862016-01-22 11:47:17 +00003733 //
3734 // TODO: Introduce a new runtime entry point taking the object
3735 // to test (instead of its class) as argument, and let it deal
3736 // with the read barrier issues. This will let us refactor this
3737 // case of the `switch` code as it was previously (with a direct
3738 // call to the runtime not using a type checking slow path).
3739 // This should also be beneficial for the other cases above.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003740 DCHECK(locations->OnlyCallsOnSlowPath());
3741 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3742 /* is_fatal */ false);
3743 codegen_->AddSlowPath(slow_path);
3744 __ B(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003745 if (zero.IsLinked()) {
3746 __ B(&done);
3747 }
3748 break;
3749 }
3750 }
3751
3752 if (zero.IsLinked()) {
3753 __ Bind(&zero);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003754 __ Mov(out, 0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003755 }
3756
3757 if (done.IsLinked()) {
3758 __ Bind(&done);
3759 }
3760
3761 if (slow_path != nullptr) {
3762 __ Bind(slow_path->GetExitLabel());
3763 }
3764}
3765
3766void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
3767 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3768 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3769
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003770 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3771 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003772 case TypeCheckKind::kExactCheck:
3773 case TypeCheckKind::kAbstractClassCheck:
3774 case TypeCheckKind::kClassHierarchyCheck:
3775 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003776 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
3777 LocationSummary::kCallOnSlowPath :
3778 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003779 break;
3780 case TypeCheckKind::kArrayCheck:
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003781 case TypeCheckKind::kUnresolvedCheck:
3782 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003783 call_kind = LocationSummary::kCallOnSlowPath;
3784 break;
3785 }
3786
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003787 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3788 locations->SetInAt(0, Location::RequiresRegister());
3789 locations->SetInAt(1, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003790 // Add temps for read barriers and other uses. One is used by TypeCheckSlowPathARM64.
3791 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003792}
3793
3794void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain44015862016-01-22 11:47:17 +00003795 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003796 LocationSummary* locations = instruction->GetLocations();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003797 Location obj_loc = locations->InAt(0);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003798 Register obj = InputRegisterAt(instruction, 0);
3799 Register cls = InputRegisterAt(instruction, 1);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003800 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3801 DCHECK_GE(num_temps, 1u);
3802 DCHECK_LE(num_temps, 3u);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003803 Location temp_loc = locations->GetTemp(0);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003804 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
3805 Location maybe_temp3_loc = (num_temps >= 3) ? locations->GetTemp(2) : Location::NoLocation();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003806 Register temp = WRegisterFrom(temp_loc);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003807 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3808 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3809 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3810 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3811 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3812 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3813 const uint32_t object_array_data_offset =
3814 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003815
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003816 bool is_type_check_slow_path_fatal = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003817 // Always false when emitting read barriers: the checks below skip read barriers (for
3818 // performance and code size), which can produce false negatives, so the slow path must
3819 // be able to retry and return rather than be fatal.
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003820 if (!kEmitCompilerReadBarrier) {
3821 is_type_check_slow_path_fatal =
3822 (type_check_kind == TypeCheckKind::kExactCheck ||
3823 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3824 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3825 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3826 !instruction->CanThrowIntoCatchBlock();
3827 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003828 SlowPathCodeARM64* type_check_slow_path =
3829 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(instruction,
3830 is_type_check_slow_path_fatal);
3831 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003832
Scott Wakeling97c72b72016-06-24 16:19:36 +01003833 vixl::aarch64::Label done;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003834 // Avoid null check if we know obj is not null.
3835 if (instruction->MustDoNullCheck()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01003836 __ Cbz(obj, &done);
3837 }
Alexandre Rames67555f72014-11-18 10:55:16 +00003838
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003839 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003840 case TypeCheckKind::kExactCheck:
3841 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003842 // /* HeapReference<Class> */ temp = obj->klass_
3843 GenerateReferenceLoadTwoRegisters(instruction,
3844 temp_loc,
3845 obj_loc,
3846 class_offset,
3847 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003848 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003849
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003850 __ Cmp(temp, cls);
3851 // Jump to slow path for throwing the exception or doing a
3852 // more involved array check.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003853 __ B(ne, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003854 break;
3855 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003856
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003857 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003858 // /* HeapReference<Class> */ temp = obj->klass_
3859 GenerateReferenceLoadTwoRegisters(instruction,
3860 temp_loc,
3861 obj_loc,
3862 class_offset,
3863 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003864 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003865
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003866 // If the class is abstract, we eagerly fetch the super class of the
3867 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003868 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003869 __ Bind(&loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003870 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003871 GenerateReferenceLoadOneRegister(instruction,
3872 temp_loc,
3873 super_offset,
3874 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003875 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003876
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003877 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3878 // exception.
3879 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3880 // Otherwise, compare classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003881 __ Cmp(temp, cls);
3882 __ B(ne, &loop);
3883 break;
3884 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003885
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003886 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003887 // /* HeapReference<Class> */ temp = obj->klass_
3888 GenerateReferenceLoadTwoRegisters(instruction,
3889 temp_loc,
3890 obj_loc,
3891 class_offset,
3892 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003893 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003894
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003895 // Walk over the class hierarchy to find a match.
Scott Wakeling97c72b72016-06-24 16:19:36 +01003896 vixl::aarch64::Label loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003897 __ Bind(&loop);
3898 __ Cmp(temp, cls);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003899 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003900
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003901 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003902 GenerateReferenceLoadOneRegister(instruction,
3903 temp_loc,
3904 super_offset,
3905 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003906 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003907
3908 // If the class reference currently in `temp` is not null, jump
3909 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003910 __ Cbnz(temp, &loop);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003911 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003912 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003913 break;
3914 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003915
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003916 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003917 // /* HeapReference<Class> */ temp = obj->klass_
3918 GenerateReferenceLoadTwoRegisters(instruction,
3919 temp_loc,
3920 obj_loc,
3921 class_offset,
3922 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003923 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003924
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01003925 // Do an exact check.
3926 __ Cmp(temp, cls);
3927 __ B(eq, &done);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003928
3929 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003930 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08003931 GenerateReferenceLoadOneRegister(instruction,
3932 temp_loc,
3933 component_offset,
3934 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003935 kWithoutReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003936
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003937 // If the component type is null, jump to the slow path to throw the exception.
3938 __ Cbz(temp, type_check_slow_path->GetEntryLabel());
3939 // Otherwise, the object is indeed an array. Further check that this component type is not a
3940 // primitive type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003941 __ Ldrh(temp, HeapOperand(temp, primitive_offset));
3942 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08003943 __ Cbnz(temp, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003944 break;
3945 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003946
Calin Juravle98893e12015-10-02 21:05:03 +01003947 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003948 // We always go into the type check slow path for the unresolved check cases.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003949 //
3950 // We cannot directly call the CheckCast runtime entry point
3951 // without resorting to a type checking slow path here (i.e. by
3952 // calling InvokeRuntime directly), as it would require assigning
3953 // fixed registers for the inputs of this HInstanceOf
3954 // instruction (following the runtime calling convention), which
3955 // might be cluttered by the potential first read barrier
3956 // emission at the beginning of this method.
3957 __ B(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003958 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003959 case TypeCheckKind::kInterfaceCheck: {
3960 // /* HeapReference<Class> */ temp = obj->klass_
3961 GenerateReferenceLoadTwoRegisters(instruction,
3962 temp_loc,
3963 obj_loc,
3964 class_offset,
3965 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003966 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003967
3968 // /* HeapReference<Class> */ temp = temp->iftable_
3969 GenerateReferenceLoadTwoRegisters(instruction,
3970 temp_loc,
3971 temp_loc,
3972 iftable_offset,
3973 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08003974 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08003975 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003976 __ Ldr(WRegisterFrom(maybe_temp2_loc), HeapOperand(temp.W(), array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08003977 // Loop through the iftable and check if any class matches.
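      // Each iftable entry is an (interface class, method array) pair, so the raw
      // length counts two slots per interface; hence the loop below advances temp
      // by 2 * kHeapReferenceSize and decrements the count by 2.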
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003978 vixl::aarch64::Label start_loop;
3979 __ Bind(&start_loop);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003980 __ Cbz(WRegisterFrom(maybe_temp2_loc), type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003981 __ Ldr(WRegisterFrom(maybe_temp3_loc), HeapOperand(temp.W(), object_array_data_offset));
3982 GetAssembler()->MaybeUnpoisonHeapReference(WRegisterFrom(maybe_temp3_loc));
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003983 // Go to next interface.
3984 __ Add(temp, temp, 2 * kHeapReferenceSize);
3985 __ Sub(WRegisterFrom(maybe_temp2_loc), WRegisterFrom(maybe_temp2_loc), 2);
Mathieu Chartierafbcdaf2016-11-14 10:50:29 -08003986 // Compare the classes and continue the loop if they do not match.
3987 __ Cmp(cls, WRegisterFrom(maybe_temp3_loc));
3988 __ B(ne, &start_loop);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07003989 break;
3990 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003991 }
Nicolas Geoffray75374372015-09-17 17:12:19 +00003992 __ Bind(&done);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003993
Roland Levillain22ccc3a2015-11-24 13:10:05 +00003994 __ Bind(type_check_slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00003995}
3996
Alexandre Rames5319def2014-10-23 10:03:10 +01003997void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
3998 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3999 locations->SetOut(Location::ConstantLocation(constant));
4000}
4001
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004002void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004003 // Will be generated at use site.
4004}
4005
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004006void LocationsBuilderARM64::VisitNullConstant(HNullConstant* constant) {
4007 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4008 locations->SetOut(Location::ConstantLocation(constant));
4009}
4010
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004011void InstructionCodeGeneratorARM64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004012 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00004013}
4014
Calin Juravle175dc732015-08-25 15:42:32 +01004015void LocationsBuilderARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4016 // The trampoline uses the same calling convention as dex calling conventions,
4017 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
4018 // the method_idx.
4019 HandleInvoke(invoke);
4020}
4021
4022void InstructionCodeGeneratorARM64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
4023 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
4024}
4025
Alexandre Rames5319def2014-10-23 10:03:10 +01004026void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01004027 InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01004028 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Alexandre Rames5319def2014-10-23 10:03:10 +01004029}
4030
Alexandre Rames67555f72014-11-18 10:55:16 +00004031void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4032 HandleInvoke(invoke);
4033}
4034
4035void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
4036 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004037 LocationSummary* locations = invoke->GetLocations();
4038 Register temp = XRegisterFrom(locations->GetTemp(0));
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004039 Location receiver = locations->InAt(0);
Alexandre Rames67555f72014-11-18 10:55:16 +00004040 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004041 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00004042
4043 // The register ip1 is required to be used for the hidden argument in
4044 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
Alexandre Ramesd921d642015-04-16 15:07:16 +01004045 MacroAssembler* masm = GetVIXLAssembler();
4046 UseScratchRegisterScope scratch_scope(masm);
Alexandre Rames67555f72014-11-18 10:55:16 +00004047 scratch_scope.Exclude(ip1);
4048 __ Mov(ip1, invoke->GetDexMethodIndex());
4049
Artem Serov914d7a82017-02-07 14:33:49 +00004050 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
Alexandre Rames67555f72014-11-18 10:55:16 +00004051 if (receiver.IsStackSlot()) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07004052 __ Ldr(temp.W(), StackOperandFrom(receiver));
Artem Serov914d7a82017-02-07 14:33:49 +00004053 {
4054 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4055 // /* HeapReference<Class> */ temp = temp->klass_
4056 __ Ldr(temp.W(), HeapOperand(temp.W(), class_offset));
4057 codegen_->MaybeRecordImplicitNullCheck(invoke);
4058 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004059 } else {
Artem Serov914d7a82017-02-07 14:33:49 +00004060 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004061 // /* HeapReference<Class> */ temp = receiver->klass_
Mathieu Chartiere401d142015-04-22 13:56:20 -07004062 __ Ldr(temp.W(), HeapOperandFrom(receiver, class_offset));
Artem Serov914d7a82017-02-07 14:33:49 +00004063 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexandre Rames67555f72014-11-18 10:55:16 +00004064 }
Artem Serov914d7a82017-02-07 14:33:49 +00004065
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004066 // Instead of simply (possibly) unpoisoning `temp` here, we should
4067 // emit a read barrier for the previous class reference load.
4068 // However this is not required in practice, as this is an
4069 // intermediate/temporary reference and because the current
4070 // concurrent copying collector keeps the from-space memory
4071 // intact/accessible until the end of the marking phase (the
3072 // concurrent copying collector may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01004073 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00004074 __ Ldr(temp,
4075 MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
4076 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004077 invoke->GetImtIndex(), kArm64PointerSize));
Alexandre Rames67555f72014-11-18 10:55:16 +00004078 // temp = temp->GetImtEntryAt(method_offset);
Mathieu Chartiere401d142015-04-22 13:56:20 -07004079 __ Ldr(temp, MemOperand(temp, method_offset));
Alexandre Rames67555f72014-11-18 10:55:16 +00004080 // lr = temp->GetEntryPoint();
Mathieu Chartiere401d142015-04-22 13:56:20 -07004081 __ Ldr(lr, MemOperand(temp, entry_point.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004082
4083 {
4084 // Ensure the pc position is recorded immediately after the `blr` instruction.
4085 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4086
4087 // lr();
4088 __ blr(lr);
4089 DCHECK(!codegen_->IsLeafMethod());
4090 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4091 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004092}
4093
4094void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffray331605a2017-03-01 11:01:41 +00004095 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004096 if (intrinsic.TryDispatch(invoke)) {
4097 return;
4098 }
4099
Alexandre Rames67555f72014-11-18 10:55:16 +00004100 HandleInvoke(invoke);
4101}
4102
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00004103void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004104 // Explicit clinit checks triggered by static invokes must have been pruned by
4105 // art::PrepareForRegisterAllocation.
4106 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004107
Nicolas Geoffray331605a2017-03-01 11:01:41 +00004108 IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena(), codegen_);
Andreas Gampe878d58c2015-01-15 23:24:00 -08004109 if (intrinsic.TryDispatch(invoke)) {
4110 return;
4111 }
4112
Alexandre Rames67555f72014-11-18 10:55:16 +00004113 HandleInvoke(invoke);
4114}
4115
Andreas Gampe878d58c2015-01-15 23:24:00 -08004116static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorARM64* codegen) {
4117 if (invoke->GetLocations()->Intrinsified()) {
4118 IntrinsicCodeGeneratorARM64 intrinsic(codegen);
4119 intrinsic.Dispatch(invoke);
4120 return true;
4121 }
4122 return false;
4123}
4124
Vladimir Markodc151b22015-10-15 18:02:30 +01004125HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
4126 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004127 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Roland Levillain44015862016-01-22 11:47:17 +00004128 // On ARM64 we support all dispatch types.
Vladimir Markodc151b22015-10-15 18:02:30 +01004129 return desired_dispatch_info;
4130}
4131
TatWai Chongd8c052a2016-11-02 16:12:48 +08004132Location CodeGeneratorARM64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
4133 Location temp) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004134 // Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Vladimir Marko58155012015-08-19 12:49:41 +00004135 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4136 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004137 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
4138 uint32_t offset =
4139 GetThreadOffset<kArm64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00004140 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004141 __ Ldr(XRegisterFrom(temp), MemOperand(tr, offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004142 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004143 }
Vladimir Marko58155012015-08-19 12:49:41 +00004144 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004145 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004146 break;
4147 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4148 // Load method address from literal pool.
Alexandre Rames6dc01742015-11-12 14:44:19 +00004149 __ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00004150 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004151 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
4152 // Add ADRP with its PC-relative DexCache access patch.
Nicolas Geoffray5d37c152017-01-12 13:25:19 +00004153 const DexFile& dex_file = invoke->GetDexFileForPcRelativeDexCache();
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004154 uint32_t element_offset = invoke->GetDexCacheArrayOffset();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004155 vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004156 EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004157 // Add LDR with its PC-relative DexCache access patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004158 vixl::aarch64::Label* ldr_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004159 NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004160 EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp));
Vladimir Marko58155012015-08-19 12:49:41 +00004161 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004162 }
Vladimir Marko58155012015-08-19 12:49:41 +00004163 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00004164 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004165 Register reg = XRegisterFrom(temp);
4166 Register method_reg;
4167 if (current_method.IsRegister()) {
4168 method_reg = XRegisterFrom(current_method);
4169 } else {
4170 DCHECK(invoke->GetLocations()->Intrinsified());
4171 DCHECK(!current_method.IsValid());
4172 method_reg = reg;
4173 __ Ldr(reg.X(), MemOperand(sp, kCurrentMethodStackOffset));
4174 }
Vladimir Markob2c431e2015-08-19 12:45:42 +00004175
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004176 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01004177 __ Ldr(reg.X(),
4178 MemOperand(method_reg.X(),
Andreas Gampe542451c2016-07-26 09:02:02 -07004179 ArtMethod::DexCacheResolvedMethodsOffset(kArm64PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004180 // temp = temp[index_in_cache];
Vladimir Marko40ecb122016-04-06 17:33:41 +01004181 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4182 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00004183 __ Ldr(reg.X(), MemOperand(reg.X(), GetCachePointerOffset(index_in_cache)));
4184 break;
4185 }
4186 }
TatWai Chongd8c052a2016-11-02 16:12:48 +08004187 return callee_method;
4188}
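// In summary, the ArtMethod* is produced as follows for each load kind above
// (a sketch; `temp` is the caller-provided temporary):
//
//   kStringInit:          ldr xTemp, [tr, #string_init_entrypoint_offset]
//   kRecursive:           reuse the method register passed in; nothing emitted
//   kDirectAddress:       ldr xTemp, <literal pool entry holding the method address>
//   kDexCachePcRelative:  adrp xTemp, <page>; ldr xTemp, [xTemp, #lo12]  (both patched)
//   kDexCacheViaMethod:   load dex_cache_resolved_methods_ from the current
//                         method, then index it with GetDexMethodIndex()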
4189
4190void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
4191 // All registers are assumed to be correctly set up.
4192 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00004193
4194 switch (invoke->GetCodePtrLocation()) {
4195 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4196 __ Bl(&frame_entry_label_);
4197 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004198 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4199 // LR = callee_method->entry_point_from_quick_compiled_code_;
4200 __ Ldr(lr, MemOperand(
Alexandre Rames6dc01742015-11-12 14:44:19 +00004201 XRegisterFrom(callee_method),
Andreas Gampe542451c2016-07-26 09:02:02 -07004202 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize).Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004203 {
4204 // To ensure that the pc position is recorded immediately after the `blr` instruction,
4205 // BLR must be the last instruction emitted in this function.
4206 // Recording the pc will occur right after returning from this function.
4207 ExactAssemblyScope eas(GetVIXLAssembler(),
4208 kInstructionSize,
4209 CodeBufferCheckScope::kExactSize);
4210 // lr();
4211 __ blr(lr);
4212 }
Vladimir Marko58155012015-08-19 12:49:41 +00004213 break;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00004214 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004215
Andreas Gampe878d58c2015-01-15 23:24:00 -08004216 DCHECK(!IsLeafMethod());
4217}
4218
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004219void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004220 // Use the calling convention instead of the location of the receiver, as
4221 // intrinsics may have put the receiver in a different register. In the intrinsics
4222 // slow path, the arguments have been moved to the right place, so here we are
4223 // guaranteed that the receiver is the first register of the calling convention.
4224 InvokeDexCallingConvention calling_convention;
4225 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004226 Register temp = XRegisterFrom(temp_in);
4227 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4228 invoke->GetVTableIndex(), kArm64PointerSize).SizeValue();
4229 Offset class_offset = mirror::Object::ClassOffset();
Andreas Gampe542451c2016-07-26 09:02:02 -07004230 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004231
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004232 DCHECK(receiver.IsRegister());
Artem Serov914d7a82017-02-07 14:33:49 +00004233
4234 {
4235 // Ensure that no pools are emitted between the load and MaybeRecordImplicitNullCheck.
4236 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
4237 // /* HeapReference<Class> */ temp = receiver->klass_
4238 __ Ldr(temp.W(), HeapOperandFrom(LocationFrom(receiver), class_offset));
4239 MaybeRecordImplicitNullCheck(invoke);
4240 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004241 // Instead of simply (possibly) unpoisoning `temp` here, we should
4242 // emit a read barrier for the previous class reference load.
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004243 // However this is not required in practice, as this is an intermediate/temporary reference and because the current
4244 // concurrent copying collector keeps the from-space memory
4245 // intact/accessible until the end of the marking phase (the
4246 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004247 GetAssembler()->MaybeUnpoisonHeapReference(temp.W());
4248 // temp = temp->GetMethodAt(method_offset);
4249 __ Ldr(temp, MemOperand(temp, method_offset));
4250 // lr = temp->GetEntryPoint();
4251 __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
Artem Serov914d7a82017-02-07 14:33:49 +00004252 {
4253 // To ensure that the pc position is recorded immediately after the `blr` instruction,
4254 // BLR should be the last instruction emitted in this function.
4255 // Recording the pc will occur right after returning from this function.
4256 ExactAssemblyScope eas(GetVIXLAssembler(), kInstructionSize, CodeBufferCheckScope::kExactSize);
4257 // lr();
4258 __ blr(lr);
4259 }
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004260}
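// The virtual dispatch above is, approximately (symbolic offsets, register
// names illustrative):
//
//   ldr wTemp, [xReceiver, #class_offset]    // temp = receiver->klass_
//   ldr xTemp, [xTemp, #vtable_entry_offset] // temp = klass->embedded_vtable_[index]
//   ldr lr,    [xTemp, #entry_point_offset]  // lr = temp->GetEntryPoint()
//   blr lr
//
// Because the receiver is always taken from the first calling-convention
// register, the same sequence works for calls emitted from intrinsics'
// slow paths.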
4261
Orion Hodsonac141392017-01-13 11:53:47 +00004262void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4263 HandleInvoke(invoke);
4264}
4265
4266void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
4267 codegen_->GenerateInvokePolymorphicCall(invoke);
4268}
4269
Scott Wakeling97c72b72016-06-24 16:19:36 +01004270vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
4271 const DexFile& dex_file,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004272 dex::StringIndex string_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004273 vixl::aarch64::Label* adrp_label) {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004274 return
4275 NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004276}
4277
Scott Wakeling97c72b72016-06-24 16:19:36 +01004278vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
4279 const DexFile& dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004280 dex::TypeIndex type_index,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004281 vixl::aarch64::Label* adrp_label) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08004282 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &pc_relative_type_patches_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004283}
4284
Vladimir Marko1998cd02017-01-13 13:02:58 +00004285vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
4286 const DexFile& dex_file,
4287 dex::TypeIndex type_index,
4288 vixl::aarch64::Label* adrp_label) {
4289 return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
4290}
4291
Scott Wakeling97c72b72016-06-24 16:19:36 +01004292vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
4293 const DexFile& dex_file,
4294 uint32_t element_offset,
4295 vixl::aarch64::Label* adrp_label) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004296 return NewPcRelativePatch(dex_file, element_offset, adrp_label, &pc_relative_dex_cache_patches_);
4297}
4298
Scott Wakeling97c72b72016-06-24 16:19:36 +01004299vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativePatch(
4300 const DexFile& dex_file,
4301 uint32_t offset_or_index,
4302 vixl::aarch64::Label* adrp_label,
4303 ArenaDeque<PcRelativePatchInfo>* patches) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004304 // Add a patch entry and return the label.
4305 patches->emplace_back(dex_file, offset_or_index);
4306 PcRelativePatchInfo* info = &patches->back();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004307 vixl::aarch64::Label* label = &info->label;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004308 // If adrp_label is null, this is the ADRP patch and needs to point to its own label.
4309 info->pc_insn_label = (adrp_label != nullptr) ? adrp_label : label;
4310 return label;
4311}
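// Typical usage of these helpers, mirroring the load kinds further below
// (a sketch; `reg` stands for whatever register the caller picked):
//
//   vixl::aarch64::Label* adrp_label = NewPcRelativeStringPatch(dex_file, string_index);
//   EmitAdrpPlaceholder(adrp_label, reg);
//   vixl::aarch64::Label* add_label =
//       NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
//   EmitAddPlaceholder(add_label, reg, reg);
//
// Passing the first label back in as `adrp_label` makes both patch entries
// share the same pc_insn_label, which is how the linker pairs an ADRP with
// its companion ADD or LDR.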
4312
Scott Wakeling97c72b72016-06-24 16:19:36 +01004313vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageStringLiteral(
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004314 const DexFile& dex_file, dex::StringIndex string_index) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004315 return boot_image_string_patches_.GetOrCreate(
4316 StringReference(&dex_file, string_index),
4317 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4318}
4319
Scott Wakeling97c72b72016-06-24 16:19:36 +01004320vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageTypeLiteral(
Andreas Gampea5b09a62016-11-17 15:21:22 -08004321 const DexFile& dex_file, dex::TypeIndex type_index) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004322 return boot_image_type_patches_.GetOrCreate(
4323 TypeReference(&dex_file, type_index),
4324 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4325}
4326
Scott Wakeling97c72b72016-06-24 16:19:36 +01004327vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateBootImageAddressLiteral(
4328 uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00004329 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004330}
4331
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004332vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLiteral(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004333 const DexFile& dex_file, dex::StringIndex string_index, Handle<mirror::String> handle) {
4334 jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
4335 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004336 return jit_string_patches_.GetOrCreate(
4337 StringReference(&dex_file, string_index),
4338 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4339}
4340
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004341vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004342 const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
4343 jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
4344 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004345 return jit_class_patches_.GetOrCreate(
4346 TypeReference(&dex_file, type_index),
4347 [this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
4348}
4349
Vladimir Markoaad75c62016-10-03 08:46:48 +00004350void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label,
4351 vixl::aarch64::Register reg) {
4352 DCHECK(reg.IsX());
4353 SingleEmissionCheckScope guard(GetVIXLAssembler());
4354 __ Bind(fixup_label);
Scott Wakelingb77051e2016-11-21 19:46:00 +00004355 __ adrp(reg, /* offset placeholder */ static_cast<int64_t>(0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004356}
4357
4358void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
4359 vixl::aarch64::Register out,
4360 vixl::aarch64::Register base) {
4361 DCHECK(out.IsX());
4362 DCHECK(base.IsX());
4363 SingleEmissionCheckScope guard(GetVIXLAssembler());
4364 __ Bind(fixup_label);
4365 __ add(out, base, Operand(/* offset placeholder */ 0));
4366}
4367
4368void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label,
4369 vixl::aarch64::Register out,
4370 vixl::aarch64::Register base) {
4371 DCHECK(base.IsX());
4372 SingleEmissionCheckScope guard(GetVIXLAssembler());
4373 __ Bind(fixup_label);
4374 __ ldr(out, MemOperand(base, /* offset placeholder */ 0));
4375}
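// The three emitters above implement the standard AArch64 pc-relative
// addressing idiom. Roughly, for the 4 KiB page granularity:
//
//   adrp reg, sym               // reg = page address of sym (low 12 bits zero)
//   add  out, reg, #:lo12:sym   // materialize the full address, or
//   ldr  out, [reg, #:lo12:sym] // load directly through it (.bss entries)
//
// The zero immediates emitted here are placeholders; the linker rewrites
// them using the patch locations recorded via the labels.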
4376
4377template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4378inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
4379 const ArenaDeque<PcRelativePatchInfo>& infos,
4380 ArenaVector<LinkerPatch>* linker_patches) {
4381 for (const PcRelativePatchInfo& info : infos) {
4382 linker_patches->push_back(Factory(info.label.GetLocation(),
4383 &info.target_dex_file,
4384 info.pc_insn_label->GetLocation(),
4385 info.offset_or_index));
4386 }
4387}
4388
Vladimir Marko58155012015-08-19 12:49:41 +00004389void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4390 DCHECK(linker_patches->empty());
4391 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004392 pc_relative_dex_cache_patches_.size() +
4393 boot_image_string_patches_.size() +
4394 pc_relative_string_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004395 boot_image_type_patches_.size() +
4396 pc_relative_type_patches_.size() +
Richard Uhlerc52f3032017-03-02 13:45:45 +00004397 type_bss_entry_patches_.size();
Vladimir Marko58155012015-08-19 12:49:41 +00004398 linker_patches->reserve(size);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004399 for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01004400 linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
Vladimir Marko58155012015-08-19 12:49:41 +00004401 &info.target_dex_file,
Scott Wakeling97c72b72016-06-24 16:19:36 +01004402 info.pc_insn_label->GetLocation(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004403 info.offset_or_index));
4404 }
4405 for (const auto& entry : boot_image_string_patches_) {
4406 const StringReference& target_string = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004407 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4408 linker_patches->push_back(LinkerPatch::StringPatch(literal->GetOffset(),
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004409 target_string.dex_file,
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004410 target_string.string_index.index_));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004411 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004412 if (!GetCompilerOptions().IsBootImage()) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004413 DCHECK(pc_relative_type_patches_.empty());
Vladimir Markoaad75c62016-10-03 08:46:48 +00004414 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
4415 linker_patches);
4416 } else {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004417 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
4418 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004419 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
4420 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004421 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004422 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
4423 linker_patches);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004424 for (const auto& entry : boot_image_type_patches_) {
4425 const TypeReference& target_type = entry.first;
Scott Wakeling97c72b72016-06-24 16:19:36 +01004426 vixl::aarch64::Literal<uint32_t>* literal = entry.second;
4427 linker_patches->push_back(LinkerPatch::TypePatch(literal->GetOffset(),
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004428 target_type.dex_file,
Andreas Gampea5b09a62016-11-17 15:21:22 -08004429 target_type.type_index.index_));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004430 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004431 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004432}
4433
Scott Wakeling97c72b72016-06-24 16:19:36 +01004434vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004435 Uint32ToLiteralMap* map) {
4436 return map->GetOrCreate(
4437 value,
4438 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(value); });
4439}
4440
Scott Wakeling97c72b72016-06-24 16:19:36 +01004441vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateUint64Literal(uint64_t value) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004442 return uint64_literals_.GetOrCreate(
4443 value,
4444 [this, value]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(value); });
Vladimir Marko58155012015-08-19 12:49:41 +00004445}
4446
Scott Wakeling97c72b72016-06-24 16:19:36 +01004447vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodLiteral(
Vladimir Marko58155012015-08-19 12:49:41 +00004448 MethodReference target_method,
4449 MethodToLiteralMap* map) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004450 return map->GetOrCreate(
4451 target_method,
4452 [this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
Vladimir Marko58155012015-08-19 12:49:41 +00004453}
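// All of the Deduplicate* helpers above rely on the same GetOrCreate()
// memoization. A minimal sketch of the idea, assuming a std::map-like
// container (the real code uses arena-allocated maps):
//
//   auto it = map->find(key);
//   if (it == map->end()) {
//     it = map->emplace(key, create_literal()).first;  // new pool literal on miss
//   }
//   return it->second;
//
// This keeps each constant or placeholder in the literal pool exactly once.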
4454
Andreas Gampe878d58c2015-01-15 23:24:00 -08004455void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00004456 // Explicit clinit checks triggered by static invokes must have been pruned by
4457 // art::PrepareForRegisterAllocation.
4458 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01004459
Andreas Gampe878d58c2015-01-15 23:24:00 -08004460 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4461 return;
4462 }
4463
Artem Serov914d7a82017-02-07 14:33:49 +00004464 // Ensure that no pools are emitted between the BLR (emitted by
4465 // GenerateStaticOrDirectCall) and RecordPcInfo.
4466 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01004467 LocationSummary* locations = invoke->GetLocations();
4468 codegen_->GenerateStaticOrDirectCall(
4469 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Nicolas Geoffraya8ac9132015-03-13 16:36:36 +00004470 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Alexandre Rames5319def2014-10-23 10:03:10 +01004471}
4472
4473void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08004474 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
4475 return;
4476 }
4477
Artem Serov914d7a82017-02-07 14:33:49 +00004478 // Ensure that no pools are emitted between the BLR (emitted by
4479 // GenerateVirtualCall) and RecordPcInfo.
4480 EmissionCheckScope guard(GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004481 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01004482 DCHECK(!codegen_->IsLeafMethod());
4483 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
4484}
4485
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004486HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
4487 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004488 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004489 case HLoadClass::LoadKind::kInvalid:
4490 LOG(FATAL) << "UNREACHABLE";
4491 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004492 case HLoadClass::LoadKind::kReferrersClass:
4493 break;
4494 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
4495 DCHECK(!GetCompilerOptions().GetCompilePic());
4496 break;
4497 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
4498 DCHECK(GetCompilerOptions().GetCompilePic());
4499 break;
4500 case HLoadClass::LoadKind::kBootImageAddress:
4501 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004502 case HLoadClass::LoadKind::kBssEntry:
4503 DCHECK(!Runtime::Current()->UseJitCompilation());
4504 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004505 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004506 DCHECK(Runtime::Current()->UseJitCompilation());
4507 break;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004508 case HLoadClass::LoadKind::kDexCacheViaMethod:
4509 break;
4510 }
4511 return desired_class_load_kind;
4512}
4513
Alexandre Rames67555f72014-11-18 10:55:16 +00004514void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00004515 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4516 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004517 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00004518 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004519 cls,
4520 LocationFrom(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00004521 LocationFrom(vixl::aarch64::x0));
Vladimir Markoea4c1262017-02-06 19:59:33 +00004522 DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004523 return;
4524 }
Vladimir Marko41559982017-01-06 14:04:23 +00004525 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004526
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004527 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
4528 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004529 ? LocationSummary::kCallOnSlowPath
4530 : LocationSummary::kNoCall;
4531 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004532 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004533 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004534 }
4535
Vladimir Marko41559982017-01-06 14:04:23 +00004536 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004537 locations->SetInAt(0, Location::RequiresRegister());
4538 }
4539 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004540 if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
4541 if (!kUseReadBarrier || kUseBakerReadBarrier) {
4542 // Rely on the type resolution or initialization and marking to save everything we need.
4543 // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
4544 // to the custom calling convention) or by marking, so we shall use IP1.
4545 RegisterSet caller_saves = RegisterSet::Empty();
4546 InvokeRuntimeCallingConvention calling_convention;
4547 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4548 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4549 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4550 Primitive::kPrimNot).GetCode());
4551 locations->SetCustomSlowPathCallerSaves(caller_saves);
4552 } else {
4553 // For non-Baker read barrier we have a temp-clobbering call.
4554 }
4555 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004556}
4557
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004558// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4559// move.
4560void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00004561 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
4562 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
4563 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01004564 return;
4565 }
Vladimir Marko41559982017-01-06 14:04:23 +00004566 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01004567
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004568 Location out_loc = cls->GetLocations()->Out();
Calin Juravle580b6092015-10-06 17:35:58 +01004569 Register out = OutputRegister(cls);
Vladimir Markoea4c1262017-02-06 19:59:33 +00004570 Register bss_entry_temp;
4571 vixl::aarch64::Label* bss_entry_adrp_label = nullptr;
Alexandre Rames67555f72014-11-18 10:55:16 +00004572
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004573 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
4574 ? kWithoutReadBarrier
4575 : kCompilerReadBarrierOption;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004576 bool generate_null_check = false;
Vladimir Marko41559982017-01-06 14:04:23 +00004577 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004578 case HLoadClass::LoadKind::kReferrersClass: {
4579 DCHECK(!cls->CanCallRuntime());
4580 DCHECK(!cls->MustGenerateClinitCheck());
4581 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
4582 Register current_method = InputRegisterAt(cls, 0);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004583 GenerateGcRootFieldLoad(cls,
4584 out_loc,
4585 current_method,
4586 ArtMethod::DeclaringClassOffset().Int32Value(),
Roland Levillain00468f32016-10-27 18:02:48 +01004587 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004588 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004589 break;
4590 }
4591 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004592 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004593 __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
4594 cls->GetTypeIndex()));
4595 break;
4596 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004597 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004598 // Add ADRP with its PC-relative type patch.
4599 const DexFile& dex_file = cls->GetDexFile();
Andreas Gampea5b09a62016-11-17 15:21:22 -08004600 dex::TypeIndex type_index = cls->GetTypeIndex();
Scott Wakeling97c72b72016-06-24 16:19:36 +01004601 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004602 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004603 // Add ADD with its PC-relative type patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004604 vixl::aarch64::Label* add_label =
4605 codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004606 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004607 break;
4608 }
4609 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004610 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004611 uint32_t address = dchecked_integral_cast<uint32_t>(
4612 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
4613 DCHECK_NE(address, 0u);
4614 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004615 break;
4616 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004617 case HLoadClass::LoadKind::kBssEntry: {
4618 // Add ADRP with its PC-relative Class .bss entry patch.
4619 const DexFile& dex_file = cls->GetDexFile();
4620 dex::TypeIndex type_index = cls->GetTypeIndex();
Vladimir Markoea4c1262017-02-06 19:59:33 +00004621 // We can go to the slow path even with a non-zero reference, and in that case
4622 // marking can clobber IP0, so we need to use IP1, which shall be preserved.
4623 bss_entry_temp = ip1;
4624 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
4625 temps.Exclude(bss_entry_temp);
4626 bss_entry_adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
4627 codegen_->EmitAdrpPlaceholder(bss_entry_adrp_label, bss_entry_temp);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004628 // Add LDR with its PC-relative Class patch.
4629 vixl::aarch64::Label* ldr_label =
Vladimir Markoea4c1262017-02-06 19:59:33 +00004630 codegen_->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004631 // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
4632 GenerateGcRootFieldLoad(cls,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004633 out_loc,
4634 bss_entry_temp,
4635 /* offset placeholder */ 0u,
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004636 ldr_label,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004637 read_barrier_option);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004638 generate_null_check = true;
4639 break;
4640 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004641 case HLoadClass::LoadKind::kJitTableAddress: {
4642 __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
4643 cls->GetTypeIndex(),
Nicolas Geoffray5247c082017-01-13 14:17:29 +00004644 cls->GetClass()));
Mathieu Chartier31b12e32016-09-02 17:11:57 -07004645 GenerateGcRootFieldLoad(cls,
4646 out_loc,
4647 out.X(),
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00004648 /* offset */ 0,
Roland Levillain00468f32016-10-27 18:02:48 +01004649 /* fixup_label */ nullptr,
Vladimir Markoea4c1262017-02-06 19:59:33 +00004650 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004651 break;
4652 }
Vladimir Marko41559982017-01-06 14:04:23 +00004653 case HLoadClass::LoadKind::kDexCacheViaMethod:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00004654 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00004655 LOG(FATAL) << "UNREACHABLE";
4656 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004657 }
4658
Vladimir Markoea4c1262017-02-06 19:59:33 +00004659 bool do_clinit = cls->MustGenerateClinitCheck();
4660 if (generate_null_check || do_clinit) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004661 DCHECK(cls->CanCallRuntime());
4662 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
Vladimir Markoea4c1262017-02-06 19:59:33 +00004663 cls, cls, cls->GetDexPc(), do_clinit, bss_entry_temp, bss_entry_adrp_label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004664 codegen_->AddSlowPath(slow_path);
4665 if (generate_null_check) {
4666 __ Cbz(out, slow_path->GetEntryLabel());
4667 }
4668 if (cls->MustGenerateClinitCheck()) {
4669 GenerateClassInitializationCheck(slow_path, out);
4670 } else {
4671 __ Bind(slow_path->GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +00004672 }
4673 }
4674}
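// For kBssEntry, the code above reduces to roughly (patched immediates shown
// symbolically):
//
//   adrp ip1, <class_bss_entry_page>
//   ldr  wOut, [ip1, #<lo12>]     // GC-root load, honors read barriers
//   cbz  wOut, slow_path          // class not resolved yet -> runtime call
//
// IP1 is excluded from the scratch pool so it survives into
// LoadClassSlowPathARM64, which can then store the resolved class back to
// the same .bss slot without recomputing the address.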
4675
David Brazdilcb1c0552015-08-04 16:22:25 +01004676static MemOperand GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07004677 return MemOperand(tr, Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01004678}
4679
Alexandre Rames67555f72014-11-18 10:55:16 +00004680void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
4681 LocationSummary* locations =
4682 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
4683 locations->SetOut(Location::RequiresRegister());
4684}
4685
4686void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
David Brazdilcb1c0552015-08-04 16:22:25 +01004687 __ Ldr(OutputRegister(instruction), GetExceptionTlsAddress());
4688}
4689
4690void LocationsBuilderARM64::VisitClearException(HClearException* clear) {
4691 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
4692}
4693
4694void InstructionCodeGeneratorARM64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
4695 __ Str(wzr, GetExceptionTlsAddress());
Alexandre Rames67555f72014-11-18 10:55:16 +00004696}
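// Both exception accessors compile to a single thread-local memory access
// through `tr`, the reserved thread register, approximately:
//
//   ldr wOut, [tr, #exception_offset]  // VisitLoadException
//   str wzr,  [tr, #exception_offset]  // VisitClearException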
4697
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004698HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
4699 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004700 switch (desired_string_load_kind) {
4701 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
4702 DCHECK(!GetCompilerOptions().GetCompilePic());
4703 break;
4704 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
4705 DCHECK(GetCompilerOptions().GetCompilePic());
4706 break;
4707 case HLoadString::LoadKind::kBootImageAddress:
4708 break;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004709 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01004710 DCHECK(!Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004711 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004712 case HLoadString::LoadKind::kJitTableAddress:
4713 DCHECK(Runtime::Current()->UseJitCompilation());
4714 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004715 case HLoadString::LoadKind::kDexCacheViaMethod:
4716 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004717 }
4718 return desired_string_load_kind;
4719}
4720
Alexandre Rames67555f72014-11-18 10:55:16 +00004721void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004722 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00004723 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004724 if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) {
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004725 InvokeRuntimeCallingConvention calling_convention;
4726 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
4727 } else {
4728 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004729 if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
4730 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00004731 // Rely on the pResolveString and marking to save everything we need.
4732 // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
4733 // to the custom calling convention) or by marking, so we shall use IP1.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004734 RegisterSet caller_saves = RegisterSet::Empty();
4735 InvokeRuntimeCallingConvention calling_convention;
4736 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
4737 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
4738 RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
4739 Primitive::kPrimNot).GetCode());
4740 locations->SetCustomSlowPathCallerSaves(caller_saves);
4741 } else {
4742 // For non-Baker read barrier we have a temp-clobbering call.
4743 }
4744 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004745 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004746}
4747
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004748// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
4749// move.
4750void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexandre Rames67555f72014-11-18 10:55:16 +00004751 Register out = OutputRegister(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004752 Location out_loc = load->GetLocations()->Out();
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004753
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004754 switch (load->GetLoadKind()) {
4755 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004756 __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
4757 load->GetStringIndex()));
4758 return; // No dex cache slow path.
4759 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004760 // Add ADRP with its PC-relative String patch.
4761 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004762 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004763 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Scott Wakeling97c72b72016-06-24 16:19:36 +01004764 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004765 codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004766 // Add ADD with its PC-relative String patch.
Scott Wakeling97c72b72016-06-24 16:19:36 +01004767 vixl::aarch64::Label* add_label =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004768 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004769 codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004770 return; // No dex cache slow path.
4771 }
4772 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004773 uint32_t address = dchecked_integral_cast<uint32_t>(
4774 reinterpret_cast<uintptr_t>(load->GetString().Get()));
4775 DCHECK_NE(address, 0u);
4776 __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004777 return; // No dex cache slow path.
4778 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004779 case HLoadString::LoadKind::kBssEntry: {
4780 // Add ADRP with its PC-relative String .bss entry patch.
4781 const DexFile& dex_file = load->GetDexFile();
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004782 const dex::StringIndex string_index = load->GetStringIndex();
Vladimir Markoaad75c62016-10-03 08:46:48 +00004783 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004784 // We could use IP0, as marking shall not clobber IP0 if the reference is null,
4785 // which is the only case needing the slow path. But let's not rely on such details and use IP1.
4786 Register temp = ip1;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004787 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
Vladimir Markoea4c1262017-02-06 19:59:33 +00004788 temps.Exclude(temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004789 vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004790 codegen_->EmitAdrpPlaceholder(adrp_label, temp);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004791 // Add LDR with its PC-relative String patch.
4792 vixl::aarch64::Label* ldr_label =
4793 codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004794 // /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
Vladimir Markoaad75c62016-10-03 08:46:48 +00004795 GenerateGcRootFieldLoad(load,
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004796 out_loc,
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004797 temp,
Roland Levillain00468f32016-10-27 18:02:48 +01004798 /* offset placeholder */ 0u,
4799 ldr_label,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08004800 kCompilerReadBarrierOption);
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004801 SlowPathCodeARM64* slow_path =
4802 new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load, temp, adrp_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004803 codegen_->AddSlowPath(slow_path);
4804 __ Cbz(out.X(), slow_path->GetEntryLabel());
4805 __ Bind(slow_path->GetExitLabel());
4806 return;
4807 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004808 case HLoadString::LoadKind::kJitTableAddress: {
4809 __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00004810 load->GetStringIndex(),
4811 load->GetString()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00004812 GenerateGcRootFieldLoad(load,
4813 out_loc,
4814 out.X(),
4815 /* offset */ 0,
4816 /* fixup_label */ nullptr,
4817 kCompilerReadBarrierOption);
4818 return;
4819 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004820 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004821 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004822 }
Roland Levillain22ccc3a2015-11-24 13:10:05 +00004823
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07004824 // TODO: Re-add the compiler code to do the string dex cache lookup.
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004825 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01004826 DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(), out.GetCode());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004827 __ Mov(calling_convention.GetRegisterAt(0).W(), load->GetStringIndex().index_);
Christina Wadsworth1fe89ea2016-08-31 16:14:38 -07004828 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
4829 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexandre Rames67555f72014-11-18 10:55:16 +00004830}
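// The kBssEntry case above mirrors the class .bss load: ADRP+LDR through IP1,
// then a CBZ into LoadStringSlowPathARM64 while the entry is still null.
// Roughly:
//
//   adrp ip1, <string_bss_entry_page>
//   ldr  wOut, [ip1, #<lo12>]   // GC-root load, honors read barriers
//   cbz  xOut, slow_path        // null => call pResolveString, store back
//
// The fallthrough at the end of the function is the kDexCacheViaMethod path:
// it moves the string index into the first runtime-call register and invokes
// kQuickResolveString directly.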
4831
Alexandre Rames5319def2014-10-23 10:03:10 +01004832void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
4833 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
4834 locations->SetOut(Location::ConstantLocation(constant));
4835}
4836
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004837void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004838 // Will be generated at use site.
4839}
4840
Alexandre Rames67555f72014-11-18 10:55:16 +00004841void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
4842 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004843 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00004844 InvokeRuntimeCallingConvention calling_convention;
4845 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4846}
4847
4848void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01004849 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004850 instruction,
4851 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004852 if (instruction->IsEnter()) {
4853 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
4854 } else {
4855 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
4856 }
Alexandre Rames67555f72014-11-18 10:55:16 +00004857}
4858
Alexandre Rames42d641b2014-10-27 14:00:51 +00004859void LocationsBuilderARM64::VisitMul(HMul* mul) {
4860 LocationSummary* locations =
4861 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
4862 switch (mul->GetResultType()) {
4863 case Primitive::kPrimInt:
4864 case Primitive::kPrimLong:
4865 locations->SetInAt(0, Location::RequiresRegister());
4866 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004867 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004868 break;
4869
4870 case Primitive::kPrimFloat:
4871 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004872 locations->SetInAt(0, Location::RequiresFpuRegister());
4873 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00004874 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00004875 break;
4876
4877 default:
4878 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4879 }
4880}
4881
4882void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
4883 switch (mul->GetResultType()) {
4884 case Primitive::kPrimInt:
4885 case Primitive::kPrimLong:
4886 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
4887 break;
4888
4889 case Primitive::kPrimFloat:
4890 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00004891 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00004892 break;
4893
4894 default:
4895 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
4896 }
4897}
4898
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004899void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
4900 LocationSummary* locations =
4901 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
4902 switch (neg->GetResultType()) {
4903 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00004904 case Primitive::kPrimLong:
Serban Constantinescu2d35d9d2015-02-22 22:08:01 +00004905 locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
Alexandre Rames67555f72014-11-18 10:55:16 +00004906 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004907 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004908
4909 case Primitive::kPrimFloat:
4910 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004911 locations->SetInAt(0, Location::RequiresFpuRegister());
4912 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004913 break;
4914
4915 default:
4916 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4917 }
4918}
4919
4920void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
4921 switch (neg->GetResultType()) {
4922 case Primitive::kPrimInt:
4923 case Primitive::kPrimLong:
4924 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
4925 break;
4926
4927 case Primitive::kPrimFloat:
4928 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00004929 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004930 break;
4931
4932 default:
4933 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
4934 }
4935}
4936
4937void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
4938 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004939 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004940 InvokeRuntimeCallingConvention calling_convention;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004941 locations->SetOut(LocationFrom(x0));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004942 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
4943 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004944}
4945
4946void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004947 // Note: if heap poisoning is enabled, the entry point takes care
4948 // of poisoning the reference.
Nicolas Geoffrayb048cb72017-01-23 22:50:24 +00004949 QuickEntrypointEnum entrypoint =
4950 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
4951 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004952 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00004953}
4954
Alexandre Rames5319def2014-10-23 10:03:10 +01004955void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
4956 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004957 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames5319def2014-10-23 10:03:10 +01004958 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00004959 if (instruction->IsStringAlloc()) {
4960 locations->AddTemp(LocationFrom(kArtMethodRegister));
4961 } else {
4962 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004963 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004964 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
4965}
4966
4967void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004968 // Note: if heap poisoning is enabled, the entry point takes care
4969 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004970 if (instruction->IsStringAlloc()) {
4971 // String is allocated through StringFactory. Call NewEmptyString entry point.
4972 Location temp = instruction->GetLocations()->GetTemp(0);
Andreas Gampe542451c2016-07-26 09:02:02 -07004973 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004974 __ Ldr(XRegisterFrom(temp), MemOperand(tr, QUICK_ENTRY_POINT(pNewEmptyString)));
4975 __ Ldr(lr, MemOperand(XRegisterFrom(temp), code_offset.Int32Value()));
Artem Serov914d7a82017-02-07 14:33:49 +00004976
4977 {
4978 // Ensure the pc position is recorded immediately after the `blr` instruction.
4979 ExactAssemblyScope eas(GetVIXLAssembler(),
4980 kInstructionSize,
4981 CodeBufferCheckScope::kExactSize);
4982 __ blr(lr);
4983 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4984 }
David Brazdil6de19382016-01-08 17:37:10 +00004985 } else {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00004986 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00004987 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00004988 }
Alexandre Rames5319def2014-10-23 10:03:10 +01004989}
4990
4991void LocationsBuilderARM64::VisitNot(HNot* instruction) {
4992 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00004993 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00004994 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01004995}
4996
4997void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004998 switch (instruction->GetResultType()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01004999 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01005000 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01005001 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01005002 break;
5003
5004 default:
5005 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
5006 }
5007}
5008
David Brazdil66d126e2015-04-03 16:02:44 +01005009void LocationsBuilderARM64::VisitBooleanNot(HBooleanNot* instruction) {
5010 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5011 locations->SetInAt(0, Location::RequiresRegister());
5012 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5013}
5014
5015void InstructionCodeGeneratorARM64::VisitBooleanNot(HBooleanNot* instruction) {
Scott Wakeling97c72b72016-06-24 16:19:36 +01005016 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), vixl::aarch64::Operand(1));
David Brazdil66d126e2015-04-03 16:02:44 +01005017}
5018
Alexandre Rames5319def2014-10-23 10:03:10 +01005019void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005020 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5021 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +01005022}
5023
Calin Juravle2ae48182016-03-16 14:05:09 +00005024void CodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
5025 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005026 return;
5027 }
Artem Serov914d7a82017-02-07 14:33:49 +00005028 {
5029 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
5030 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5031 Location obj = instruction->GetLocations()->InAt(0);
5032 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
5033 RecordPcInfo(instruction, instruction->GetDexPc());
5034 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005035}
5036
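// Rough picture of what the implicit null check above emits; the fault
// handling itself lives in the runtime's signal handler, so this is only a
// sketch of the contract:
//
//   ldr wzr, [obj, #0]   // Faults iff obj == null; the loaded value is discarded.
//
// The PC recorded right after the load lets the fault handler map the SIGSEGV
// back to this HNullCheck and throw a NullPointerException instead of crashing.
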
Calin Juravle2ae48182016-03-16 14:05:09 +00005037void CodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005038 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005039 AddSlowPath(slow_path);
Alexandre Rames5319def2014-10-23 10:03:10 +01005040
5041 LocationSummary* locations = instruction->GetLocations();
5042 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005043
5044 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
Alexandre Rames5319def2014-10-23 10:03:10 +01005045}
5046
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005047void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005048 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005049}
5050
Alexandre Rames67555f72014-11-18 10:55:16 +00005051void LocationsBuilderARM64::VisitOr(HOr* instruction) {
5052 HandleBinaryOp(instruction);
5053}
5054
5055void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
5056 HandleBinaryOp(instruction);
5057}
5058
Alexandre Rames3e69f162014-12-10 10:36:50 +00005059void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
5060 LOG(FATAL) << "Unreachable";
5061}
5062
5063void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
5064 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5065}
5066
Alexandre Rames5319def2014-10-23 10:03:10 +01005067void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
5068 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5069 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
5070 if (location.IsStackSlot()) {
5071 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5072 } else if (location.IsDoubleStackSlot()) {
5073 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
5074 }
5075 locations->SetOut(location);
5076}
5077
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005078void InstructionCodeGeneratorARM64::VisitParameterValue(
5079 HParameterValue* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005080 // Nothing to do, the parameter is already at its location.
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005081}
5082
5083void LocationsBuilderARM64::VisitCurrentMethod(HCurrentMethod* instruction) {
5084 LocationSummary* locations =
5085 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffray38207af2015-06-01 15:46:22 +01005086 locations->SetOut(LocationFrom(kArtMethodRegister));
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01005087}
5088
5089void InstructionCodeGeneratorARM64::VisitCurrentMethod(
5090 HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
5091 // Nothing to do, the method is already at its location.
Alexandre Rames5319def2014-10-23 10:03:10 +01005092}
5093
5094void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
5095 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01005096 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005097 locations->SetInAt(i, Location::Any());
5098 }
5099 locations->SetOut(Location::Any());
5100}
5101
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005102void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005103 LOG(FATAL) << "Unreachable";
5104}
5105
Serban Constantinescu02164b32014-11-13 14:05:07 +00005106void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005107 Primitive::Type type = rem->GetResultType();
Alexandre Rames542361f2015-01-29 16:57:31 +00005108 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005109 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
5110 : LocationSummary::kNoCall;
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005111 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
5112
5113 switch (type) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005114 case Primitive::kPrimInt:
5115 case Primitive::kPrimLong:
5116 locations->SetInAt(0, Location::RequiresRegister());
Zheng Xuc6667102015-05-15 16:08:45 +08005117 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00005118 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5119 break;
5120
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005121 case Primitive::kPrimFloat:
5122 case Primitive::kPrimDouble: {
5123 InvokeRuntimeCallingConvention calling_convention;
5124 locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
5125 locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
5126 locations->SetOut(calling_convention.GetReturnLocation(type));
5127
5128 break;
5129 }
5130
Serban Constantinescu02164b32014-11-13 14:05:07 +00005131 default:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005132 LOG(FATAL) << "Unexpected rem type " << type;
Serban Constantinescu02164b32014-11-13 14:05:07 +00005133 }
5134}
5135
5136void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
5137 Primitive::Type type = rem->GetResultType();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005138
Serban Constantinescu02164b32014-11-13 14:05:07 +00005139 switch (type) {
5140 case Primitive::kPrimInt:
5141 case Primitive::kPrimLong: {
Zheng Xuc6667102015-05-15 16:08:45 +08005142 GenerateDivRemIntegral(rem);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005143 break;
5144 }
5145
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005146 case Primitive::kPrimFloat:
5147 case Primitive::kPrimDouble: {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005148 QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
5149 codegen_->InvokeRuntime(entrypoint, rem, rem->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00005150 if (type == Primitive::kPrimFloat) {
5151 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
5152 } else {
5153 CheckEntrypointTypes<kQuickFmod, double, double, double>();
5154 }
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00005155 break;
5156 }
5157
Serban Constantinescu02164b32014-11-13 14:05:07 +00005158 default:
5159 LOG(FATAL) << "Unexpected rem type " << type;
Vladimir Marko351dddf2015-12-11 16:34:46 +00005160 UNREACHABLE();
Serban Constantinescu02164b32014-11-13 14:05:07 +00005161 }
5162}
5163
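// A hedged sketch of the float/double remainder path above: the inputs are
// already in the runtime calling convention's FP argument registers, so the
// lowering is essentially a call to the fmodf/fmod entrypoints, roughly:
//
//   float  rem: bl <pFmodf>   // (s0, s1) -> s0
//   double rem: bl <pFmod>    // (d0, d1) -> d0
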
Calin Juravle27df7582015-04-17 19:12:31 +01005164void LocationsBuilderARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
5165 memory_barrier->SetLocations(nullptr);
5166}
5167
5168void InstructionCodeGeneratorARM64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005169 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01005170}
5171
Alexandre Rames5319def2014-10-23 10:03:10 +01005172void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
5173 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
5174 Primitive::Type return_type = instruction->InputAt(0)->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +00005175 locations->SetInAt(0, ARM64ReturnLocation(return_type));
Alexandre Rames5319def2014-10-23 10:03:10 +01005176}
5177
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005178void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005179 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005180}
5181
5182void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
5183 instruction->SetLocations(nullptr);
5184}
5185
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005186void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction ATTRIBUTE_UNUSED) {
Alexandre Rames5319def2014-10-23 10:03:10 +01005187 codegen_->GenerateFrameExit();
Alexandre Rames5319def2014-10-23 10:03:10 +01005188}
5189
Scott Wakeling40a04bf2015-12-11 09:50:36 +00005190void LocationsBuilderARM64::VisitRor(HRor* ror) {
5191 HandleBinaryOp(ror);
5192}
5193
5194void InstructionCodeGeneratorARM64::VisitRor(HRor* ror) {
5195 HandleBinaryOp(ror);
5196}
5197
Serban Constantinescu02164b32014-11-13 14:05:07 +00005198void LocationsBuilderARM64::VisitShl(HShl* shl) {
5199 HandleShift(shl);
5200}
5201
5202void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
5203 HandleShift(shl);
5204}
5205
5206void LocationsBuilderARM64::VisitShr(HShr* shr) {
5207 HandleShift(shr);
5208}
5209
5210void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
5211 HandleShift(shr);
5212}
5213
Alexandre Rames5319def2014-10-23 10:03:10 +01005214void LocationsBuilderARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005215 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005216}
5217
5218void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005219 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005220}
5221
Alexandre Rames67555f72014-11-18 10:55:16 +00005222void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005223 HandleFieldGet(instruction);
Alexandre Rames67555f72014-11-18 10:55:16 +00005224}
5225
5226void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005227 HandleFieldGet(instruction, instruction->GetFieldInfo());
Alexandre Rames67555f72014-11-18 10:55:16 +00005228}
5229
5230void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Alexandre Rames09a99962015-04-15 11:47:56 +01005231 HandleFieldSet(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01005232}
5233
Alexandre Rames67555f72014-11-18 10:55:16 +00005234void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005235 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexandre Rames5319def2014-10-23 10:03:10 +01005236}
5237
Calin Juravlee460d1d2015-09-29 04:52:17 +01005238void LocationsBuilderARM64::VisitUnresolvedInstanceFieldGet(
5239 HUnresolvedInstanceFieldGet* instruction) {
5240 FieldAccessCallingConventionARM64 calling_convention;
5241 codegen_->CreateUnresolvedFieldLocationSummary(
5242 instruction, instruction->GetFieldType(), calling_convention);
5243}
5244
5245void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldGet(
5246 HUnresolvedInstanceFieldGet* instruction) {
5247 FieldAccessCallingConventionARM64 calling_convention;
5248 codegen_->GenerateUnresolvedFieldAccess(instruction,
5249 instruction->GetFieldType(),
5250 instruction->GetFieldIndex(),
5251 instruction->GetDexPc(),
5252 calling_convention);
5253}
5254
5255void LocationsBuilderARM64::VisitUnresolvedInstanceFieldSet(
5256 HUnresolvedInstanceFieldSet* instruction) {
5257 FieldAccessCallingConventionARM64 calling_convention;
5258 codegen_->CreateUnresolvedFieldLocationSummary(
5259 instruction, instruction->GetFieldType(), calling_convention);
5260}
5261
5262void InstructionCodeGeneratorARM64::VisitUnresolvedInstanceFieldSet(
5263 HUnresolvedInstanceFieldSet* instruction) {
5264 FieldAccessCallingConventionARM64 calling_convention;
5265 codegen_->GenerateUnresolvedFieldAccess(instruction,
5266 instruction->GetFieldType(),
5267 instruction->GetFieldIndex(),
5268 instruction->GetDexPc(),
5269 calling_convention);
5270}
5271
5272void LocationsBuilderARM64::VisitUnresolvedStaticFieldGet(
5273 HUnresolvedStaticFieldGet* instruction) {
5274 FieldAccessCallingConventionARM64 calling_convention;
5275 codegen_->CreateUnresolvedFieldLocationSummary(
5276 instruction, instruction->GetFieldType(), calling_convention);
5277}
5278
5279void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldGet(
5280 HUnresolvedStaticFieldGet* instruction) {
5281 FieldAccessCallingConventionARM64 calling_convention;
5282 codegen_->GenerateUnresolvedFieldAccess(instruction,
5283 instruction->GetFieldType(),
5284 instruction->GetFieldIndex(),
5285 instruction->GetDexPc(),
5286 calling_convention);
5287}
5288
5289void LocationsBuilderARM64::VisitUnresolvedStaticFieldSet(
5290 HUnresolvedStaticFieldSet* instruction) {
5291 FieldAccessCallingConventionARM64 calling_convention;
5292 codegen_->CreateUnresolvedFieldLocationSummary(
5293 instruction, instruction->GetFieldType(), calling_convention);
5294}
5295
5296void InstructionCodeGeneratorARM64::VisitUnresolvedStaticFieldSet(
5297 HUnresolvedStaticFieldSet* instruction) {
5298 FieldAccessCallingConventionARM64 calling_convention;
5299 codegen_->GenerateUnresolvedFieldAccess(instruction,
5300 instruction->GetFieldType(),
5301 instruction->GetFieldIndex(),
5302 instruction->GetDexPc(),
5303 calling_convention);
5304}
5305
Alexandre Rames5319def2014-10-23 10:03:10 +01005306void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005307 LocationSummary* locations =
5308 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005309 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Alexandre Rames5319def2014-10-23 10:03:10 +01005310}
5311
5312void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005313 HBasicBlock* block = instruction->GetBlock();
5314 if (block->GetLoopInformation() != nullptr) {
5315 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5316 // The back edge will generate the suspend check.
5317 return;
5318 }
5319 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5320 // The goto will generate the suspend check.
5321 return;
5322 }
5323 GenerateSuspendCheck(instruction, nullptr);
Alexandre Rames5319def2014-10-23 10:03:10 +01005324}
5325
Alexandre Rames67555f72014-11-18 10:55:16 +00005326void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
5327 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01005328 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexandre Rames67555f72014-11-18 10:55:16 +00005329 InvokeRuntimeCallingConvention calling_convention;
5330 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
5331}
5332
5333void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
Serban Constantinescu22f81d32016-02-18 16:06:31 +00005334 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08005335 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00005336}
5337
5338void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
5339 LocationSummary* locations =
5340 new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
5341 Primitive::Type input_type = conversion->GetInputType();
5342 Primitive::Type result_type = conversion->GetResultType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00005343 DCHECK_NE(input_type, result_type);
Alexandre Rames67555f72014-11-18 10:55:16 +00005344 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
5345 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
5346 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
5347 }
5348
Alexandre Rames542361f2015-01-29 16:57:31 +00005349 if (Primitive::IsFloatingPointType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005350 locations->SetInAt(0, Location::RequiresFpuRegister());
5351 } else {
5352 locations->SetInAt(0, Location::RequiresRegister());
5353 }
5354
Alexandre Rames542361f2015-01-29 16:57:31 +00005355 if (Primitive::IsFloatingPointType(result_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005356 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5357 } else {
5358 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5359 }
5360}
5361
5362void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
5363 Primitive::Type result_type = conversion->GetResultType();
5364 Primitive::Type input_type = conversion->GetInputType();
5365
5366 DCHECK_NE(input_type, result_type);
5367
Alexandre Rames542361f2015-01-29 16:57:31 +00005368 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexandre Rames67555f72014-11-18 10:55:16 +00005369 int result_size = Primitive::ComponentSize(result_type);
5370 int input_size = Primitive::ComponentSize(input_type);
Alexandre Rames3e69f162014-12-10 10:36:50 +00005371 int min_size = std::min(result_size, input_size);
Serban Constantinescu02164b32014-11-13 14:05:07 +00005372 Register output = OutputRegister(conversion);
5373 Register source = InputRegisterAt(conversion, 0);
Alexandre Rames8626b742015-11-25 16:28:08 +00005374 if (result_type == Primitive::kPrimInt && input_type == Primitive::kPrimLong) {
Alexandre Rames4dff2fd2015-08-20 13:36:35 +01005375 // 'int' values are used directly as W registers, discarding the top
5376 // bits, so we don't need to sign-extend and can just perform a move.
5377 // We do not pass the `kDiscardForSameWReg` argument to force clearing the
5378 // top 32 bits of the target register. We theoretically could leave those
5379 // bits unchanged, but we would have to make sure that no code uses a
5380 // 32bit input value as a 64bit value assuming that the top 32 bits are
5381 // zero.
5382 __ Mov(output.W(), source.W());
Alexandre Rames8626b742015-11-25 16:28:08 +00005383 } else if (result_type == Primitive::kPrimChar ||
5384 (input_type == Primitive::kPrimChar && input_size < result_size)) {
5385 __ Ubfx(output,
5386 output.IsX() ? source.X() : source.W(),
5387 0, Primitive::ComponentSize(Primitive::kPrimChar) * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005388 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00005389 __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
Alexandre Rames67555f72014-11-18 10:55:16 +00005390 }
Alexandre Rames542361f2015-01-29 16:57:31 +00005391 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005392 __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005393 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005394 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
5395 __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
Alexandre Rames542361f2015-01-29 16:57:31 +00005396 } else if (Primitive::IsFloatingPointType(result_type) &&
5397 Primitive::IsFloatingPointType(input_type)) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00005398 __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
5399 } else {
5400 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
5401 << " to " << result_type;
Alexandre Rames67555f72014-11-18 10:55:16 +00005402 }
Serban Constantinescu02164b32014-11-13 14:05:07 +00005403}
Alexandre Rames67555f72014-11-18 10:55:16 +00005404
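// For intuition, a few integral conversion cases as the logic above would
// emit them (register choices illustrative):
//
//   long -> byte :  sbfx w0, w1, #0, #8    // sign-extend the low 8 bits
//   int  -> char :  ubfx w0, w1, #0, #16   // zero-extend, char is unsigned
//   byte -> long :  sbfx x0, x1, #0, #8    // widening uses min_size of the two
//   long -> int  :  mov  w0, w1            // writing a W register clears the top bits
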
Serban Constantinescu02164b32014-11-13 14:05:07 +00005405void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
5406 HandleShift(ushr);
5407}
5408
5409void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
5410 HandleShift(ushr);
Alexandre Rames67555f72014-11-18 10:55:16 +00005411}
5412
5413void LocationsBuilderARM64::VisitXor(HXor* instruction) {
5414 HandleBinaryOp(instruction);
5415}
5416
5417void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
5418 HandleBinaryOp(instruction);
5419}
5420
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005421void LocationsBuilderARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005422 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005423 LOG(FATAL) << "Unreachable";
5424}
5425
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005426void InstructionCodeGeneratorARM64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00005427 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00005428 LOG(FATAL) << "Unreachable";
5429}
5430
Mark Mendellfe57faa2015-09-18 09:26:15 -04005431// Simple implementation of packed switch - generate cascaded compare/jumps.
5432void LocationsBuilderARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5433 LocationSummary* locations =
5434 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
5435 locations->SetInAt(0, Location::RequiresRegister());
5436}
5437
5438void InstructionCodeGeneratorARM64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
5439 int32_t lower_bound = switch_instr->GetStartValue();
Zheng Xu3927c8b2015-11-18 17:46:25 +08005440 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendellfe57faa2015-09-18 09:26:15 -04005441 Register value_reg = InputRegisterAt(switch_instr, 0);
5442 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
5443
Zheng Xu3927c8b2015-11-18 17:46:25 +08005444  // Assume that, on average, at most 16 instructions are generated per HIR in a graph.
Scott Wakeling97c72b72016-06-24 16:19:36 +01005445 static constexpr int32_t kMaxExpectedSizePerHInstruction = 16 * kInstructionSize;
Zheng Xu3927c8b2015-11-18 17:46:25 +08005446  // ADR has a limited range (+/-1MB), so we set a threshold for the number of HIRs in the graph to
5447 // make sure we don't emit it if the target may run out of range.
5448 // TODO: Instead of emitting all jump tables at the end of the code, we could keep track of ADR
5449 // ranges and emit the tables only as required.
5450  static constexpr int32_t kJumpTableInstructionThreshold = 1 * MB / kMaxExpectedSizePerHInstruction;
Mark Mendellfe57faa2015-09-18 09:26:15 -04005451
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005452 if (num_entries <= kPackedSwitchCompareJumpThreshold ||
Zheng Xu3927c8b2015-11-18 17:46:25 +08005453 // Current instruction id is an upper bound of the number of HIRs in the graph.
5454 GetGraph()->GetCurrentInstructionId() > kJumpTableInstructionThreshold) {
5455 // Create a series of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005456 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5457 Register temp = temps.AcquireW();
5458 __ Subs(temp, value_reg, Operand(lower_bound));
5459
Zheng Xu3927c8b2015-11-18 17:46:25 +08005460 const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00005461 // Jump to successors[0] if value == lower_bound.
5462 __ B(eq, codegen_->GetLabelOf(successors[0]));
5463 int32_t last_index = 0;
5464 for (; num_entries - last_index > 2; last_index += 2) {
5465 __ Subs(temp, temp, Operand(2));
5466 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
5467 __ B(lo, codegen_->GetLabelOf(successors[last_index + 1]));
5468 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
5469 __ B(eq, codegen_->GetLabelOf(successors[last_index + 2]));
5470 }
5471 if (num_entries - last_index == 2) {
5472 // The last missing case_value.
5473 __ Cmp(temp, Operand(1));
5474 __ B(eq, codegen_->GetLabelOf(successors[last_index + 1]));
Zheng Xu3927c8b2015-11-18 17:46:25 +08005475 }
5476
5477 // And the default for any other value.
5478 if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
5479 __ B(codegen_->GetLabelOf(default_block));
5480 }
5481 } else {
Alexandre Ramesc01a6642016-04-15 11:54:06 +01005482 JumpTableARM64* jump_table = codegen_->CreateJumpTable(switch_instr);
Zheng Xu3927c8b2015-11-18 17:46:25 +08005483
5484 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
5485
5486    // The instructions below should use at most one blocked register. Since there are two blocked
5487 // registers, we are free to block one.
5488 Register temp_w = temps.AcquireW();
5489 Register index;
5490 // Remove the bias.
5491 if (lower_bound != 0) {
5492 index = temp_w;
5493 __ Sub(index, value_reg, Operand(lower_bound));
5494 } else {
5495 index = value_reg;
5496 }
5497
5498    // Jump to the default block if the index is out of range.
5499 __ Cmp(index, Operand(num_entries));
5500 __ B(hs, codegen_->GetLabelOf(default_block));
5501
5502    // In the current VIXL implementation, encoding the immediate value for Adr does not require
5503    // any blocked registers, so we are free to use both VIXL blocked registers to reduce
5504    // register pressure.
5505 Register table_base = temps.AcquireX();
5506 // Load jump offset from the table.
5507 __ Adr(table_base, jump_table->GetTableStartLabel());
5508 Register jump_offset = temp_w;
5509 __ Ldr(jump_offset, MemOperand(table_base, index, UXTW, 2));
5510
5511    // Jump to the target block by branching to table_base (PC-relative) + offset.
5512 Register target_address = table_base;
5513 __ Add(target_address, table_base, Operand(jump_offset, SXTW));
5514 __ Br(target_address);
Mark Mendellfe57faa2015-09-18 09:26:15 -04005515 }
5516}
5517
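// A sketch of the jump-table form emitted above for large switches, with
// illustrative labels (the table itself is emitted later by JumpTableARM64):
//
//   sub  w16, w_value, #lower_bound   // remove the bias (skipped when it is 0)
//   cmp  w16, #num_entries
//   b.hs default_block
//   adr  x17, table
//   ldr  w16, [x17, w16, uxtw #2]     // 32-bit offsets, hence the scale of 2
//   add  x17, x17, w16, sxtw          // offsets are relative to the table base
//   br   x17
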
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005518void InstructionCodeGeneratorARM64::GenerateReferenceLoadOneRegister(
5519 HInstruction* instruction,
5520 Location out,
5521 uint32_t offset,
5522 Location maybe_temp,
5523 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005524 Primitive::Type type = Primitive::kPrimNot;
5525 Register out_reg = RegisterFrom(out, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005526 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005527 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005528 Register temp_reg = RegisterFrom(maybe_temp, type);
5529 if (kUseBakerReadBarrier) {
5530 // Load with fast path based Baker's read barrier.
5531 // /* HeapReference<Object> */ out = *(out + offset)
5532 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5533 out,
5534 out_reg,
5535 offset,
5536 temp_reg,
5537 /* needs_null_check */ false,
5538 /* use_load_acquire */ false);
5539 } else {
5540 // Load with slow path based read barrier.
5541 // Save the value of `out` into `maybe_temp` before overwriting it
5542 // in the following move operation, as we will need it for the
5543 // read barrier below.
5544 __ Mov(temp_reg, out_reg);
5545 // /* HeapReference<Object> */ out = *(out + offset)
5546 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5547 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
5548 }
5549 } else {
5550 // Plain load with no read barrier.
5551 // /* HeapReference<Object> */ out = *(out + offset)
5552 __ Ldr(out_reg, HeapOperand(out_reg, offset));
5553 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5554 }
5555}
5556
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005557void InstructionCodeGeneratorARM64::GenerateReferenceLoadTwoRegisters(
5558 HInstruction* instruction,
5559 Location out,
5560 Location obj,
5561 uint32_t offset,
5562 Location maybe_temp,
5563 ReadBarrierOption read_barrier_option) {
Roland Levillain44015862016-01-22 11:47:17 +00005564 Primitive::Type type = Primitive::kPrimNot;
5565 Register out_reg = RegisterFrom(out, type);
5566 Register obj_reg = RegisterFrom(obj, type);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005567 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08005568 CHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005569 if (kUseBakerReadBarrier) {
5570 // Load with fast path based Baker's read barrier.
5571 Register temp_reg = RegisterFrom(maybe_temp, type);
5572 // /* HeapReference<Object> */ out = *(obj + offset)
5573 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
5574 out,
5575 obj_reg,
5576 offset,
5577 temp_reg,
5578 /* needs_null_check */ false,
5579 /* use_load_acquire */ false);
5580 } else {
5581 // Load with slow path based read barrier.
5582 // /* HeapReference<Object> */ out = *(obj + offset)
5583 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5584 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
5585 }
5586 } else {
5587 // Plain load with no read barrier.
5588 // /* HeapReference<Object> */ out = *(obj + offset)
5589 __ Ldr(out_reg, HeapOperand(obj_reg, offset));
5590 GetAssembler()->MaybeUnpoisonHeapReference(out_reg);
5591 }
5592}
5593
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005594void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(
5595 HInstruction* instruction,
5596 Location root,
5597 Register obj,
5598 uint32_t offset,
5599 vixl::aarch64::Label* fixup_label,
5600 ReadBarrierOption read_barrier_option) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005601 DCHECK(fixup_label == nullptr || offset == 0u);
Roland Levillain44015862016-01-22 11:47:17 +00005602 Register root_reg = RegisterFrom(root, Primitive::kPrimNot);
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08005603 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07005604 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain44015862016-01-22 11:47:17 +00005605 if (kUseBakerReadBarrier) {
5606 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
Roland Levillain35345a52017-02-27 14:32:08 +00005607      // Baker's read barriers are used:
Roland Levillain44015862016-01-22 11:47:17 +00005608 //
Roland Levillain35345a52017-02-27 14:32:08 +00005609 // root = obj.field;
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005610 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
Roland Levillain35345a52017-02-27 14:32:08 +00005611 // if (temp != null) {
5612 // root = temp(root)
Roland Levillain44015862016-01-22 11:47:17 +00005613 // }
5614
5615 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005616 if (fixup_label == nullptr) {
5617 __ Ldr(root_reg, MemOperand(obj, offset));
5618 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005619 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005620 }
Roland Levillain44015862016-01-22 11:47:17 +00005621 static_assert(
5622 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
5623 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
5624 "have different sizes.");
5625 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
5626 "art::mirror::CompressedReference<mirror::Object> and int32_t "
5627 "have different sizes.");
5628
Roland Levillain35345a52017-02-27 14:32:08 +00005629 Register temp = lr;
5630
5631      // Slow path marking the GC root `root`. The entrypoint will already be loaded in `temp`.
5632 SlowPathCodeARM64* slow_path =
5633 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction,
5634 root,
5635 LocationFrom(temp));
5636 codegen_->AddSlowPath(slow_path);
5637 const int32_t entry_point_offset =
5638 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArm64PointerSize>(root.reg());
5639 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
5640 // Loading the entrypoint does not require a load acquire since it is only changed when
5641 // threads are suspended or running a checkpoint.
5642 __ Ldr(temp, MemOperand(tr, entry_point_offset));
Mathieu Chartierfe814e82016-11-09 14:32:49 -08005643      // The entrypoint is null when the GC is not marking; this saves one load compared to
5644 // checking GetIsGcMarking.
Roland Levillain44015862016-01-22 11:47:17 +00005645 __ Cbnz(temp, slow_path->GetEntryLabel());
5646 __ Bind(slow_path->GetExitLabel());
5647 } else {
5648 // GC root loaded through a slow path for read barriers other
5649 // than Baker's.
5650 // /* GcRoot<mirror::Object>* */ root = obj + offset
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005651 if (fixup_label == nullptr) {
5652 __ Add(root_reg.X(), obj.X(), offset);
5653 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005654 codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005655 }
Roland Levillain44015862016-01-22 11:47:17 +00005656 // /* mirror::Object* */ root = root->Read()
5657 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
5658 }
5659 } else {
5660 // Plain GC root load with no read barrier.
5661 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005662 if (fixup_label == nullptr) {
5663 __ Ldr(root_reg, MemOperand(obj, offset));
5664 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00005665 codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005666 }
Roland Levillain44015862016-01-22 11:47:17 +00005667 // Note that GC roots are not affected by heap poisoning, thus we
5668 // do not have to unpoison `root_reg` here.
5669 }
5670}
5671
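// The marking entrypoints used above form a per-register table on the Thread;
// a hedged sketch of the offset computation behind
// CodeGenerator::GetReadBarrierMarkEntryPointsOffset (exact layout is
// runtime-defined):
//
//   // offset(pReadBarrierMarkRegNN) = offset(pReadBarrierMarkReg00)
//   //                                 + NN * kArm64PointerSize
//
// One entrypoint per register lets the slow path pass and receive the root
// in place, without shuffling it into a fixed argument register.
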
5672void CodeGeneratorARM64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
5673 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005674 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005675 uint32_t offset,
5676 Register temp,
5677 bool needs_null_check,
5678 bool use_load_acquire) {
5679 DCHECK(kEmitCompilerReadBarrier);
5680 DCHECK(kUseBakerReadBarrier);
5681
5682 // /* HeapReference<Object> */ ref = *(obj + offset)
5683 Location no_index = Location::NoLocation();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005684 size_t no_scale_factor = 0u;
Roland Levillainbfea3352016-06-23 13:48:47 +01005685 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5686 ref,
5687 obj,
5688 offset,
5689 no_index,
5690 no_scale_factor,
5691 temp,
5692 needs_null_check,
5693 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005694}
5695
5696void CodeGeneratorARM64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
5697 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005698 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005699 uint32_t data_offset,
5700 Location index,
5701 Register temp,
5702 bool needs_null_check) {
5703 DCHECK(kEmitCompilerReadBarrier);
5704 DCHECK(kUseBakerReadBarrier);
5705
5706 // Array cells are never volatile variables, therefore array loads
5707 // never use Load-Acquire instructions on ARM64.
5708 const bool use_load_acquire = false;
5709
Roland Levillainbfea3352016-06-23 13:48:47 +01005710 static_assert(
5711 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5712 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005713 // /* HeapReference<Object> */ ref =
5714 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Roland Levillainbfea3352016-06-23 13:48:47 +01005715 size_t scale_factor = Primitive::ComponentSizeShift(Primitive::kPrimNot);
5716 GenerateReferenceLoadWithBakerReadBarrier(instruction,
5717 ref,
5718 obj,
5719 data_offset,
5720 index,
5721 scale_factor,
5722 temp,
5723 needs_null_check,
5724 use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005725}
5726
5727void CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
5728 Location ref,
Scott Wakeling97c72b72016-06-24 16:19:36 +01005729 Register obj,
Roland Levillain44015862016-01-22 11:47:17 +00005730 uint32_t offset,
5731 Location index,
Roland Levillainbfea3352016-06-23 13:48:47 +01005732 size_t scale_factor,
Roland Levillain44015862016-01-22 11:47:17 +00005733 Register temp,
5734 bool needs_null_check,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005735 bool use_load_acquire,
5736 bool always_update_field) {
Roland Levillain44015862016-01-22 11:47:17 +00005737 DCHECK(kEmitCompilerReadBarrier);
5738 DCHECK(kUseBakerReadBarrier);
Roland Levillainbfea3352016-06-23 13:48:47 +01005739 // If we are emitting an array load, we should not be using a
5740 // Load Acquire instruction. In other words:
5741 // `instruction->IsArrayGet()` => `!use_load_acquire`.
5742 DCHECK(!instruction->IsArrayGet() || !use_load_acquire);
Roland Levillain44015862016-01-22 11:47:17 +00005743
Roland Levillain35345a52017-02-27 14:32:08 +00005744 MacroAssembler* masm = GetVIXLAssembler();
5745 UseScratchRegisterScope temps(masm);
5746
5747 // In slow path based read barriers, the read barrier call is
5748 // inserted after the original load. However, in fast path based
5749 // Baker's read barriers, we need to perform the load of
5750 // mirror::Object::monitor_ *before* the original reference load.
5751 // This load-load ordering is required by the read barrier.
5752 // The fast path/slow path (for Baker's algorithm) should look like:
Roland Levillain44015862016-01-22 11:47:17 +00005753 //
Roland Levillain35345a52017-02-27 14:32:08 +00005754 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
5755 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
5756 // HeapReference<Object> ref = *src; // Original reference load.
5757 // bool is_gray = (rb_state == ReadBarrier::GrayState());
5758 // if (is_gray) {
5759 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005760 // }
Roland Levillain35345a52017-02-27 14:32:08 +00005761 //
5762 // Note: the original implementation in ReadBarrier::Barrier is
5763 // slightly more complex as it performs additional checks that we do
5764 // not do here for performance reasons.
Roland Levillain44015862016-01-22 11:47:17 +00005765
5766 Primitive::Type type = Primitive::kPrimNot;
5767 Register ref_reg = RegisterFrom(ref, type);
Roland Levillain35345a52017-02-27 14:32:08 +00005768 DCHECK(obj.IsW());
5769 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
Roland Levillain44015862016-01-22 11:47:17 +00005770
Roland Levillain35345a52017-02-27 14:32:08 +00005771 {
5772 // Ensure that between load and MaybeRecordImplicitNullCheck there are no pools emitted.
5773 EmissionCheckScope guard(GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
5774 // /* int32_t */ monitor = obj->monitor_
5775 __ Ldr(temp, HeapOperand(obj, monitor_offset));
5776 if (needs_null_check) {
5777 MaybeRecordImplicitNullCheck(instruction);
5778 }
5779 }
5780 // /* LockWord */ lock_word = LockWord(monitor)
5781 static_assert(sizeof(LockWord) == sizeof(int32_t),
5782 "art::LockWord and int32_t have different sizes.");
Roland Levillain44015862016-01-22 11:47:17 +00005783
Roland Levillain35345a52017-02-27 14:32:08 +00005784 // Introduce a dependency on the lock_word including rb_state,
5785 // to prevent load-load reordering, and without using
5786 // a memory barrier (which would be more expensive).
5787 // `obj` is unchanged by this operation, but its value now depends
5788 // on `temp`.
5789 __ Add(obj.X(), obj.X(), Operand(temp.X(), LSR, 32));
5790
5791 // The actual reference load.
Roland Levillain44015862016-01-22 11:47:17 +00005792 if (index.IsValid()) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005793 // Load types involving an "index": ArrayGet,
5794 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
5795 // intrinsics.
Roland Levillainbfea3352016-06-23 13:48:47 +01005796 if (use_load_acquire) {
5797 // UnsafeGetObjectVolatile intrinsic case.
5798 // Register `index` is not an index in an object array, but an
5799 // offset to an object reference field within object `obj`.
5800 DCHECK(instruction->IsInvoke()) << instruction->DebugName();
5801 DCHECK(instruction->GetLocations()->Intrinsified());
5802 DCHECK(instruction->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile)
5803 << instruction->AsInvoke()->GetIntrinsic();
Roland Levillaina1aa3b12016-10-26 13:03:38 +01005804 DCHECK_EQ(offset, 0u);
5805 DCHECK_EQ(scale_factor, 0u);
Roland Levillain35345a52017-02-27 14:32:08 +00005806      DCHECK_EQ(needs_null_check, false);
5807 // /* HeapReference<Object> */ ref = *(obj + index)
Roland Levillainbfea3352016-06-23 13:48:47 +01005808 MemOperand field = HeapOperand(obj, XRegisterFrom(index));
5809 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005810 } else {
Roland Levillain35345a52017-02-27 14:32:08 +00005811 // ArrayGet and UnsafeGetObject intrinsics cases.
5812 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
Roland Levillainbfea3352016-06-23 13:48:47 +01005813 if (index.IsConstant()) {
5814 uint32_t computed_offset = offset + (Int64ConstantFrom(index) << scale_factor);
5815 Load(type, ref_reg, HeapOperand(obj, computed_offset));
5816 } else {
Roland Levillain35345a52017-02-27 14:32:08 +00005817 Register temp3 = temps.AcquireW();
5818 __ Add(temp3, obj, offset);
5819 Load(type, ref_reg, HeapOperand(temp3, XRegisterFrom(index), LSL, scale_factor));
5820 temps.Release(temp3);
Roland Levillainbfea3352016-06-23 13:48:47 +01005821 }
Roland Levillain44015862016-01-22 11:47:17 +00005822 }
Roland Levillain44015862016-01-22 11:47:17 +00005823 } else {
Roland Levillain35345a52017-02-27 14:32:08 +00005824 // /* HeapReference<Object> */ ref = *(obj + offset)
Roland Levillain44015862016-01-22 11:47:17 +00005825 MemOperand field = HeapOperand(obj, offset);
5826 if (use_load_acquire) {
Roland Levillain35345a52017-02-27 14:32:08 +00005827 LoadAcquire(instruction, ref_reg, field, /* needs_null_check */ false);
Roland Levillain44015862016-01-22 11:47:17 +00005828 } else {
5829 Load(type, ref_reg, field);
5830 }
5831 }
5832
5833 // Object* ref = ref_addr->AsMirrorPtr()
5834 GetAssembler()->MaybeUnpoisonHeapReference(ref_reg);
Roland Levillain35345a52017-02-27 14:32:08 +00005835
5836 // Slow path marking the object `ref` when it is gray.
5837 SlowPathCodeARM64* slow_path;
5838 if (always_update_field) {
5839 // ReadBarrierMarkAndUpdateFieldSlowPathARM64 only supports
5840 // address of the form `obj + field_offset`, where `obj` is a
5841 // register and `field_offset` is a register. Thus `offset` and
5842    // `scale_factor` above are expected to be zero in this code path.
5843 DCHECK_EQ(offset, 0u);
5844 DCHECK_EQ(scale_factor, 0u); /* "times 1" */
5845 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathARM64(
5846 instruction, ref, obj, /* field_offset */ index, temp);
5847 } else {
5848 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathARM64(instruction, ref);
5849 }
5850 AddSlowPath(slow_path);
5851
5852 // if (rb_state == ReadBarrier::GrayState())
5853 // ref = ReadBarrier::Mark(ref);
5854 // Given the numeric representation, it's enough to check the low bit of the rb_state.
5855 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
5856 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
5857 __ Tbnz(temp, LockWord::kReadBarrierStateShift, slow_path->GetEntryLabel());
5858 __ Bind(slow_path->GetExitLabel());
Roland Levillain44015862016-01-22 11:47:17 +00005859}
5860
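// The `Add(obj, obj, temp >> 32)` above is worth a sketch: the lock word load
// and the reference load must not be reordered, and an address dependency is
// cheaper than a memory barrier. Illustrative sequence, assuming `w_temp`
// holds the 32-bit lock word:
//
//   ldr  w_temp, [x_obj, #monitor_offset]  // lock word, contains rb_state
//   add  x_obj, x_obj, x_temp, lsr #32     // adds 0, but creates a dependency
//   ldr  w_ref, [x_obj, #offset]           // ordered after the lock word load
//   tbnz w_temp, #rb_state_shift, slow     // mark `ref` if the object is gray
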
5861void CodeGeneratorARM64::GenerateReadBarrierSlow(HInstruction* instruction,
5862 Location out,
5863 Location ref,
5864 Location obj,
5865 uint32_t offset,
5866 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005867 DCHECK(kEmitCompilerReadBarrier);
5868
Roland Levillain44015862016-01-22 11:47:17 +00005869 // Insert a slow path based read barrier *after* the reference load.
5870 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005871 // If heap poisoning is enabled, the unpoisoning of the loaded
5872 // reference will be carried out by the runtime within the slow
5873 // path.
5874 //
5875 // Note that `ref` currently does not get unpoisoned (when heap
5876 // poisoning is enabled), which is alright as the `ref` argument is
5877 // not used by the artReadBarrierSlow entry point.
5878 //
5879 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
5880 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena())
5881 ReadBarrierForHeapReferenceSlowPathARM64(instruction, out, ref, obj, offset, index);
5882 AddSlowPath(slow_path);
5883
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005884 __ B(slow_path->GetEntryLabel());
5885 __ Bind(slow_path->GetExitLabel());
5886}
5887
Roland Levillain44015862016-01-22 11:47:17 +00005888void CodeGeneratorARM64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5889 Location out,
5890 Location ref,
5891 Location obj,
5892 uint32_t offset,
5893 Location index) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005894 if (kEmitCompilerReadBarrier) {
Roland Levillain44015862016-01-22 11:47:17 +00005895 // Baker's read barriers shall be handled by the fast path
5896 // (CodeGeneratorARM64::GenerateReferenceLoadWithBakerReadBarrier).
5897 DCHECK(!kUseBakerReadBarrier);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005898 // If heap poisoning is enabled, unpoisoning will be taken care of
5899 // by the runtime within the slow path.
Roland Levillain44015862016-01-22 11:47:17 +00005900 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005901 } else if (kPoisonHeapReferences) {
5902 GetAssembler()->UnpoisonHeapReference(WRegisterFrom(out));
5903 }
5904}
5905
Roland Levillain44015862016-01-22 11:47:17 +00005906void CodeGeneratorARM64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5907 Location out,
5908 Location root) {
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005909 DCHECK(kEmitCompilerReadBarrier);
5910
Roland Levillain44015862016-01-22 11:47:17 +00005911 // Insert a slow path based read barrier *after* the GC root load.
5912 //
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005913 // Note that GC roots are not affected by heap poisoning, so we do
5914 // not need to do anything special for this here.
5915 SlowPathCodeARM64* slow_path =
5916 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathARM64(instruction, out, root);
5917 AddSlowPath(slow_path);
5918
Roland Levillain22ccc3a2015-11-24 13:10:05 +00005919 __ B(slow_path->GetEntryLabel());
5920 __ Bind(slow_path->GetExitLabel());
5921}
5922
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005923void LocationsBuilderARM64::VisitClassTableGet(HClassTableGet* instruction) {
5924 LocationSummary* locations =
5925 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
5926 locations->SetInAt(0, Location::RequiresRegister());
5927 locations->SetOut(Location::RequiresRegister());
5928}
5929
5930void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instruction) {
5931 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00005932 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005933 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005934 instruction->GetIndex(), kArm64PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005935 __ Ldr(XRegisterFrom(locations->Out()),
5936 MemOperand(XRegisterFrom(locations->InAt(0)), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005937 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005938 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00005939 instruction->GetIndex(), kArm64PointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00005940 __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)),
5941 mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value()));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01005942 __ Ldr(XRegisterFrom(locations->Out()),
5943 MemOperand(XRegisterFrom(locations->Out()), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005944 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005945}
5946
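// For reference, a sketch of the two lookups above (offsets symbolic, register
// choices illustrative):
//
//   vtable case: ldr x0, [x_class, #embedded_vtable_entry(index)]
//   IMT case:    ldr x0, [x_class, #imt_ptr_offset]   // load the ImTable*
//                ldr x0, [x0, #imt_entry(index)]      // then the ArtMethod*
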
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005947static void PatchJitRootUse(uint8_t* code,
5948 const uint8_t* roots_data,
5949 vixl::aarch64::Literal<uint32_t>* literal,
5950 uint64_t index_in_table) {
5951 uint32_t literal_offset = literal->GetOffset();
5952 uintptr_t address =
5953 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
5954 uint8_t* data = code + literal_offset;
5955 reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
5956}
5957
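// In other words, each JIT string/class literal is a 32-bit slot in the code
// that gets back-patched with the address of its entry in the JIT roots table;
// a rough picture (layout illustrative):
//
//   roots_data: [GcRoot 0][GcRoot 1]...     // one slot per string/class root
//   code:       literal at literal_offset   // patched to &roots_data[index]
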
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005958void CodeGeneratorARM64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
5959 for (const auto& entry : jit_string_patches_) {
5960 const auto& it = jit_string_roots_.find(entry.first);
5961 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00005962 PatchJitRootUse(code, roots_data, entry.second, it->second);
5963 }
5964 for (const auto& entry : jit_class_patches_) {
5965 const auto& it = jit_class_roots_.find(entry.first);
5966 DCHECK(it != jit_class_roots_.end());
5967 PatchJitRootUse(code, roots_data, entry.second, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00005968 }
5969}
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00005970
Alexandre Rames67555f72014-11-18 10:55:16 +00005971#undef __
5972#undef QUICK_ENTRY_POINT
5973
Alexandre Rames5319def2014-10-23 10:03:10 +01005974} // namespace arm64
5975} // namespace art