/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_

#include "arch/arm64/quick_method_frame_info_arm64.h"
#include "code_generator.h"
#include "common_arm64.h"
#include "dex/compiler_enums.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "string_reference.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/type_reference.h"

// TODO: make vixl clean wrt -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "a64/disasm-a64.h"
#include "a64/macro-assembler-a64.h"
#pragma GCC diagnostic pop

namespace art {
namespace arm64 {

class CodeGeneratorARM64;

// Use a local definition to prevent copying mistakes.
static constexpr size_t kArm64WordSize = kArm64PointerSize;

static const vixl::aarch64::Register kParameterCoreRegisters[] = {
  vixl::aarch64::x1,
  vixl::aarch64::x2,
  vixl::aarch64::x3,
  vixl::aarch64::x4,
  vixl::aarch64::x5,
  vixl::aarch64::x6,
  vixl::aarch64::x7
};
static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);
static const vixl::aarch64::FPRegister kParameterFPRegisters[] = {
  vixl::aarch64::d0,
  vixl::aarch64::d1,
  vixl::aarch64::d2,
  vixl::aarch64::d3,
  vixl::aarch64::d4,
  vixl::aarch64::d5,
  vixl::aarch64::d6,
  vixl::aarch64::d7
};
static constexpr size_t kParameterFPRegistersLength = arraysize(kParameterFPRegisters);

// Thread Register.
const vixl::aarch64::Register tr = vixl::aarch64::x19;
// Method register on invoke.
static const vixl::aarch64::Register kArtMethodRegister = vixl::aarch64::x0;
const vixl::aarch64::CPURegList vixl_reserved_core_registers(vixl::aarch64::ip0,
                                                             vixl::aarch64::ip1);
const vixl::aarch64::CPURegList vixl_reserved_fp_registers(vixl::aarch64::d31);

const vixl::aarch64::CPURegList runtime_reserved_core_registers(tr, vixl::aarch64::lr);

// Callee-saved registers defined by AAPCS64 (without x19, the Thread Register).
const vixl::aarch64::CPURegList callee_saved_core_registers(vixl::aarch64::CPURegister::kRegister,
                                                            vixl::aarch64::kXRegSize,
                                                            vixl::aarch64::x20.GetCode(),
                                                            vixl::aarch64::x30.GetCode());
const vixl::aarch64::CPURegList callee_saved_fp_registers(vixl::aarch64::CPURegister::kFPRegister,
                                                          vixl::aarch64::kDRegSize,
                                                          vixl::aarch64::d8.GetCode(),
                                                          vixl::aarch64::d15.GetCode());
Location ARM64ReturnLocation(Primitive::Type return_type);
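// For reference, a rough sketch of the mapping this is expected to implement
// (assuming the usual AArch64 core/FP return-register split):
//   kPrimInt, kPrimNot, ...  -> core register 0 (w0/x0)
//   kPrimFloat, kPrimDouble  -> FP register 0 (s0/d0)
//   kPrimVoid                -> Location::NoLocation()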

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  explicit SlowPathCodeARM64(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  vixl::aarch64::Label* GetEntryLabel() { return &entry_label_; }
  vixl::aarch64::Label* GetExitLabel() { return &exit_label_; }

  void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;
  void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations) OVERRIDE;

 private:
  vixl::aarch64::Label entry_label_;
  vixl::aarch64::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};
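// For illustration, a hypothetical concrete slow path. Subclasses implement
// EmitNativeCode(), bind GetEntryLabel() at the start of the out-of-line code,
// and branch back to the fast path via GetExitLabel() (`MySlowPathARM64` and
// the `__` assembler macro are illustrative, not part of this header):
//
//   class MySlowPathARM64 : public SlowPathCodeARM64 {
//    public:
//     explicit MySlowPathARM64(HInstruction* instr) : SlowPathCodeARM64(instr) {}
//     void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
//       __ Bind(GetEntryLabel());
//       // ... save live registers, call the runtime, restore registers ...
//       __ B(GetExitLabel());
//     }
//   };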

class JumpTableARM64 : public DeletableArenaObject<kArenaAllocSwitchTable> {
 public:
  explicit JumpTableARM64(HPackedSwitch* switch_instr)
    : switch_instr_(switch_instr), table_start_() {}

  vixl::aarch64::Label* GetTableStartLabel() { return &table_start_; }

  void EmitTable(CodeGeneratorARM64* codegen);

 private:
  HPackedSwitch* const switch_instr_;
  vixl::aarch64::Label table_start_;

  DISALLOW_COPY_AND_ASSIGN(JumpTableARM64);
};
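// A sketch of how a jump table is typically consumed when lowering
// HPackedSwitch, assuming the usual ADR + offset-load + BR dispatch
// (`table_base` is an illustrative scratch register):
//
//   JumpTableARM64* table = codegen->CreateJumpTable(switch_instr);
//   __ Adr(table_base, table->GetTableStartLabel());
//   // ... load the case's relative offset from the table, add, then Br() ...
//
// EmitTable() later emits the table contents at GetTableStartLabel().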

static const vixl::aarch64::Register kRuntimeParameterCoreRegisters[] =
    { vixl::aarch64::x0,
      vixl::aarch64::x1,
      vixl::aarch64::x2,
      vixl::aarch64::x3,
      vixl::aarch64::x4,
      vixl::aarch64::x5,
      vixl::aarch64::x6,
      vixl::aarch64::x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const vixl::aarch64::FPRegister kRuntimeParameterFpuRegisters[] =
    { vixl::aarch64::d0,
      vixl::aarch64::d1,
      vixl::aarch64::d2,
      vixl::aarch64::d3,
      vixl::aarch64::d4,
      vixl::aarch64::d5,
      vixl::aarch64::d6,
      vixl::aarch64::d7 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<vixl::aarch64::Register,
                                                                vixl::aarch64::FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength,
                          kArm64PointerSize) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

class InvokeDexCallingConvention : public CallingConvention<vixl::aarch64::Register,
                                                            vixl::aarch64::FPRegister> {
 public:
  InvokeDexCallingConvention()
      : CallingConvention(kParameterCoreRegisters,
                          kParameterCoreRegistersLength,
                          kParameterFPRegisters,
                          kParameterFPRegistersLength,
                          kArm64PointerSize) {}

  Location GetReturnLocation(Primitive::Type return_type) const {
    return ARM64ReturnLocation(return_type);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};

class InvokeDexCallingConventionVisitorARM64 : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorARM64() {}
  virtual ~InvokeDexCallingConventionVisitorARM64() {}

  Location GetNextLocation(Primitive::Type type) OVERRIDE;
  Location GetReturnLocation(Primitive::Type return_type) const OVERRIDE {
    return calling_convention.GetReturnLocation(return_type);
  }
  Location GetMethodLocation() const OVERRIDE;

 private:
  InvokeDexCallingConvention calling_convention;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorARM64);
};
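// Example of the assignment GetNextLocation() is expected to produce for a
// method taking (int, double, long), given that x0 is reserved for the
// ArtMethod* and arguments are handed out from x1-x7 / d0-d7 in order:
//   arg0 (int)    -> w1
//   arg1 (double) -> d0
//   arg2 (long)   -> x2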

class FieldAccessCallingConventionARM64 : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionARM64() {}

  Location GetObjectLocation() const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch64::x1);
  }
  Location GetFieldIndexLocation() const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch64::x0);
  }
  Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch64::x0);
  }
  Location GetSetValueLocation(Primitive::Type type, bool is_instance) const OVERRIDE {
    return Primitive::Is64BitType(type)
        ? helpers::LocationFrom(vixl::aarch64::x2)
        : (is_instance
            ? helpers::LocationFrom(vixl::aarch64::x2)
            : helpers::LocationFrom(vixl::aarch64::x1));
  }
  Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return helpers::LocationFrom(vixl::aarch64::d0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionARM64);
};
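// Summarizing the mapping above: the field index travels in x0 and the object
// (for instance accesses) in x1; results come back in x0 (core) or d0 (FP);
// the value to store goes in x2, except for a 32-bit static set, where x1 is
// free and is used instead.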

class InstructionCodeGeneratorARM64 : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorARM64(HGraph* graph, CodeGeneratorARM64* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  Arm64Assembler* GetAssembler() const { return assembler_; }
  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->vixl_masm_; }

 private:
  void GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
                                        vixl::aarch64::Register class_reg);
  void GenerateSuspendCheck(HSuspendCheck* instruction, HBasicBlock* successor);
  void HandleBinaryOp(HBinaryOperation* instr);

  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleCondition(HCondition* instruction);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp);
  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers (if any).
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               vixl::aarch64::Register obj,
                               uint32_t offset,
                               vixl::aarch64::Label* fixup_label = nullptr);

  // Generate a floating-point comparison.
  void GenerateFcmp(HInstruction* instruction);

  void HandleShift(HBinaryOperation* instr);
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             vixl::aarch64::Label* true_target,
                             vixl::aarch64::Label* false_target);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemIntegral(HBinaryOperation* instruction);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);

  Arm64Assembler* const assembler_;
  CodeGeneratorARM64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorARM64);
};

class LocationsBuilderARM64 : public HGraphVisitor {
 public:
  LocationsBuilderARM64(HGraph* graph, CodeGeneratorARM64* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_ARM64(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_SHARED(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleBinaryOp(HBinaryOperation* instr);
  void HandleFieldSet(HInstruction* instruction);
  void HandleFieldGet(HInstruction* instruction);
  void HandleInvoke(HInvoke* instr);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* instr);

  CodeGeneratorARM64* const codegen_;
  InvokeDexCallingConventionVisitorARM64 parameter_visitor_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderARM64);
};

class ParallelMoveResolverARM64 : public ParallelMoveResolverNoSwap {
 public:
  ParallelMoveResolverARM64(ArenaAllocator* allocator, CodeGeneratorARM64* codegen)
      : ParallelMoveResolverNoSwap(allocator), codegen_(codegen), vixl_temps_() {}

 protected:
  void PrepareForEmitNativeCode() OVERRIDE;
  void FinishEmitNativeCode() OVERRIDE;
  Location AllocateScratchLocationFor(Location::Kind kind) OVERRIDE;
  void FreeScratchLocation(Location loc) OVERRIDE;
  void EmitMove(size_t index) OVERRIDE;

 private:
  Arm64Assembler* GetAssembler() const;
  vixl::aarch64::MacroAssembler* GetVIXLAssembler() const {
    return GetAssembler()->vixl_masm_;
  }

  CodeGeneratorARM64* const codegen_;
  vixl::aarch64::UseScratchRegisterScope vixl_temps_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverARM64);
};

class CodeGeneratorARM64 : public CodeGenerator {
 public:
  CodeGeneratorARM64(HGraph* graph,
                     const Arm64InstructionSetFeatures& isa_features,
                     const CompilerOptions& compiler_options,
                     OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorARM64() {}

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;

  vixl::aarch64::CPURegList GetFramePreservedCoreRegisters() const;
  vixl::aarch64::CPURegList GetFramePreservedFPRegisters() const;

  void Bind(HBasicBlock* block) OVERRIDE;

  vixl::aarch64::Label* GetLabelOf(HBasicBlock* block) {
    block = FirstNonEmptyBlock(block);
    return &(block_labels_[block->GetBlockId()]);
  }

  size_t GetWordSize() const OVERRIDE {
    return kArm64WordSize;
  }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE {
    // Allocated in D registers, which are word sized.
    return kArm64WordSize;
  }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    vixl::aarch64::Label* block_entry_label = GetLabelOf(block);
    DCHECK(block_entry_label->IsBound());
    return block_entry_label->GetLocation();
  }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }
  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }
  Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
  const Arm64Assembler& GetAssembler() const OVERRIDE { return assembler_; }
  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->vixl_masm_; }

  // Emit a write barrier.
  void MarkGCCard(vixl::aarch64::Register object,
                  vixl::aarch64::Register value,
                  bool value_can_be_null);
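  // A sketch of the marking this typically expands to, assuming the usual ART
  // card table scheme where `tr` caches the Thread* (`<card table offset>` is
  // a placeholder for the actual Thread offset):
  //
  //   Ldr(card, MemOperand(tr, <card table offset>))
  //   Lsr(temp, object, gc::accounting::CardTable::kCardShift)
  //   Strb(card, MemOperand(card, temp))  // card_table[object >> shift] = dirty
  //
  // with the sequence skipped when `value_can_be_null` and `value` is null.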

  void GenerateMemoryBarrier(MemBarrierKind kind);

  // Register allocation.

  void SetupBlockedRegisters() const OVERRIDE;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;

  // The number of registers that can be allocated. The register allocator may
  // decide to reserve and not use a few of them.
  // We do not consider registers sp, xzr, wzr. They are either not allocatable
  // (xzr, wzr) or make for poor allocatable registers (sp has alignment
  // requirements, etc.). This also simplifies our task, as all other registers
  // can easily be mapped to or from their type and index or code.
  static const int kNumberOfAllocatableRegisters = vixl::aarch64::kNumberOfRegisters - 1;
  static const int kNumberOfAllocatableFPRegisters = vixl::aarch64::kNumberOfFPRegisters;
  static constexpr int kNumberOfAllocatableRegisterPairs = 0;

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  InstructionSet GetInstructionSet() const OVERRIDE {
    return InstructionSet::kArm64;
  }

  const Arm64InstructionSetFeatures& GetInstructionSetFeatures() const {
    return isa_features_;
  }

  void Initialize() OVERRIDE {
    block_labels_.resize(GetGraph()->GetBlocks().size());
  }

  // We want to use the STP and LDP instructions to spill and restore registers for slow paths.
  // These instructions can only encode offsets that are multiples of the register size accessed.
  uint32_t GetPreferredSlotsAlignment() const OVERRIDE { return vixl::aarch64::kXRegSizeInBytes; }
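  // For example, with 8-byte-aligned slots two core registers can be spilled
  // with one paired store, since STP scales its immediate by the register size
  // (a sketch; `offset` must be a multiple of vixl::aarch64::kXRegSizeInBytes):
  //
  //   __ Stp(x0, x1, MemOperand(sp, offset));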

  JumpTableARM64* CreateJumpTable(HPackedSwitch* switch_instr) {
    jump_tables_.emplace_back(new (GetGraph()->GetArena()) JumpTableARM64(switch_instr));
    return jump_tables_.back().get();
  }

  void Finalize(CodeAllocator* allocator) OVERRIDE;

  // Code generation helpers.
  void MoveConstant(vixl::aarch64::CPURegister destination, HConstant* constant);
  void MoveConstant(Location destination, int32_t value) OVERRIDE;
  void MoveLocation(Location dst, Location src, Primitive::Type dst_type) OVERRIDE;
  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;

  void Load(Primitive::Type type,
            vixl::aarch64::CPURegister dst,
            const vixl::aarch64::MemOperand& src);
  void Store(Primitive::Type type,
             vixl::aarch64::CPURegister src,
             const vixl::aarch64::MemOperand& dst);
  void LoadAcquire(HInstruction* instruction,
                   vixl::aarch64::CPURegister dst,
                   const vixl::aarch64::MemOperand& src,
                   bool needs_null_check);
  void StoreRelease(Primitive::Type type,
                    vixl::aarch64::CPURegister src,
                    const vixl::aarch64::MemOperand& dst);
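  // LoadAcquire/StoreRelease are expected to map to the AArch64 acquire/release
  // instructions matching the access size, e.g. (sketch):
  //   kPrimInt            -> ldar  / stlr
  //   kPrimBoolean, Byte  -> ldarb / stlrb
  //   kPrimChar, Short    -> ldarh / stlrh
  // which provides the ordering required for Java volatile accesses.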

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path) OVERRIDE;

  void InvokeRuntime(int32_t offset,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path);
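  // A typical call site might look like the following sketch, with arguments
  // already in x0-x7 / d0-d7 per the runtime calling convention above:
  //
  //   codegen->InvokeRuntime(kQuickTestSuspend, instr, instr->GetDexPc(), slow_path);
  //
  // which records the dex PC and jumps to the entry point read off the Thread*.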

  ParallelMoveResolverARM64* GetMoveResolver() OVERRIDE { return &move_resolver_; }

  bool NeedsTwoRegisters(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return false;
  }

  // Check if the desired_string_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_class_load_kind is supported. If it is, return it;
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it;
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      MethodReference target_method) OVERRIDE;

  void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
  void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;

  void MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                              Primitive::Type type ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL);
  }

  // Add a new PC-relative string patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewPcRelativeStringPatch(const DexFile& dex_file,
                                                 uint32_t string_index,
                                                 vixl::aarch64::Label* adrp_label = nullptr);
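  // Sketch of the two-step protocol described above:
  //
  //   vixl::aarch64::Label* adrp = NewPcRelativeStringPatch(dex_file, idx);
  //   // ... bind `adrp` and emit the ADRP (page of the string address), then:
  //   vixl::aarch64::Label* add = NewPcRelativeStringPatch(dex_file, idx, adrp);
  //   // ... bind `add` and emit the ADD with the low 12 bits of the address.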

  // Add a new PC-relative type patch for an instruction and return the label
  // to be bound before the instruction. The instruction will be either the
  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
  // to the associated ADRP patch label).
  vixl::aarch64::Label* NewPcRelativeTypePatch(const DexFile& dex_file,
                                               uint32_t type_index,
                                               vixl::aarch64::Label* adrp_label = nullptr);

  // Add a new PC-relative dex cache array patch for an instruction and return
  // the label to be bound before the instruction. The instruction will be
  // either the ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label`
  // pointing to the associated ADRP patch label).
  vixl::aarch64::Label* NewPcRelativeDexCacheArrayPatch(
      const DexFile& dex_file,
      uint32_t element_offset,
      vixl::aarch64::Label* adrp_label = nullptr);

  vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageStringLiteral(const DexFile& dex_file,
                                                                      uint32_t string_index);
  vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageTypeLiteral(const DexFile& dex_file,
                                                                    uint32_t type_index);
  vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address);
  vixl::aarch64::Literal<uint64_t>* DeduplicateDexCacheAddressLiteral(uint64_t address);

  void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;

  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch64::Register obj,
                                             uint32_t offset,
                                             vixl::aarch64::Register temp,
                                             bool needs_null_check,
                                             bool use_load_acquire);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             vixl::aarch64::Register obj,
                                             uint32_t data_offset,
                                             Location index,
                                             vixl::aarch64::Register temp,
                                             bool needs_null_check);
  // Factored implementation used by GenerateFieldLoadWithBakerReadBarrier
  // and GenerateArrayLoadWithBakerReadBarrier.
  void GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                 Location ref,
                                                 vixl::aarch64::Register obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 size_t scale_factor,
                                                 vixl::aarch64::Register temp,
                                                 bool needs_null_check,
                                                 bool use_load_acquire);

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  void GenerateNop();

  void GenerateImplicitNullCheck(HNullCheck* instruction);
  void GenerateExplicitNullCheck(HNullCheck* instruction);

 private:
  using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, vixl::aarch64::Literal<uint64_t>*>;
  using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, vixl::aarch64::Literal<uint32_t>*>;
  using MethodToLiteralMap = ArenaSafeMap<MethodReference,
                                          vixl::aarch64::Literal<uint64_t>*,
                                          MethodReferenceComparator>;
  using BootStringToLiteralMap = ArenaSafeMap<StringReference,
                                              vixl::aarch64::Literal<uint32_t>*,
                                              StringReferenceValueComparator>;
  using BootTypeToLiteralMap = ArenaSafeMap<TypeReference,
                                            vixl::aarch64::Literal<uint32_t>*,
                                            TypeReferenceValueComparator>;

  vixl::aarch64::Literal<uint32_t>* DeduplicateUint32Literal(uint32_t value,
                                                             Uint32ToLiteralMap* map);
  vixl::aarch64::Literal<uint64_t>* DeduplicateUint64Literal(uint64_t value);
  vixl::aarch64::Literal<uint64_t>* DeduplicateMethodLiteral(MethodReference target_method,
                                                             MethodToLiteralMap* map);
  vixl::aarch64::Literal<uint64_t>* DeduplicateMethodAddressLiteral(MethodReference target_method);
  vixl::aarch64::Literal<uint64_t>* DeduplicateMethodCodeLiteral(MethodReference target_method);

  // The PcRelativePatchInfo is used for PC-relative addressing of dex cache arrays
  // and boot image strings/types. The only difference is the interpretation of the
  // offset_or_index.
  struct PcRelativePatchInfo {
    PcRelativePatchInfo(const DexFile& dex_file, uint32_t off_or_idx)
        : target_dex_file(dex_file), offset_or_index(off_or_idx), label(), pc_insn_label() { }

    const DexFile& target_dex_file;
    // Either the dex cache array element offset or the string/type index.
    uint32_t offset_or_index;
    vixl::aarch64::Label label;
    vixl::aarch64::Label* pc_insn_label;
  };

  vixl::aarch64::Label* NewPcRelativePatch(const DexFile& dex_file,
                                           uint32_t offset_or_index,
                                           vixl::aarch64::Label* adrp_label,
                                           ArenaDeque<PcRelativePatchInfo>* patches);

  void EmitJumpTables();

  // Labels for each block that will be compiled.
  // We use a deque so that the `vixl::aarch64::Label` objects do not move in memory.
  ArenaDeque<vixl::aarch64::Label> block_labels_;  // Indexed by block id.
  vixl::aarch64::Label frame_entry_label_;
  ArenaVector<std::unique_ptr<JumpTableARM64>> jump_tables_;

  LocationsBuilderARM64 location_builder_;
  InstructionCodeGeneratorARM64 instruction_visitor_;
  ParallelMoveResolverARM64 move_resolver_;
  Arm64Assembler assembler_;
  const Arm64InstructionSetFeatures& isa_features_;

  // Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
  Uint32ToLiteralMap uint32_literals_;
  // Deduplication map for 64-bit literals, used for non-patchable method address, method code
  // or string dex cache address.
  Uint64ToLiteralMap uint64_literals_;
  // Method patch info, mapping MethodReference to a literal for method address and method code.
  MethodToLiteralMap method_patches_;
  MethodToLiteralMap call_patches_;
  // Relative call patch info.
  // Using ArenaDeque<> which retains element addresses on push/emplace_back().
  ArenaDeque<MethodPatchInfo<vixl::aarch64::Label>> relative_call_patches_;
  // PC-relative DexCache access info.
  ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
  // Deduplication map for boot string literals for kBootImageLinkTimeAddress.
  BootStringToLiteralMap boot_image_string_patches_;
  // PC-relative String patch info.
  ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
  // Deduplication map for boot type literals for kBootImageLinkTimeAddress.
  BootTypeToLiteralMap boot_image_type_patches_;
  // PC-relative type patch info.
  ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
  // Deduplication map for patchable boot image addresses.
  Uint32ToLiteralMap boot_image_address_patches_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARM64);
};

inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const {
  return codegen_->GetAssembler();
}

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_ARM64_H_