/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_H_

#include "arch/instruction_set.h"
#include "arch/instruction_set_features.h"
#include "base/arena_containers.h"
#include "base/arena_object.h"
#include "base/bit_field.h"
#include "compiled_method.h"
#include "driver/compiler_options.h"
#include "globals.h"
#include "graph_visualizer.h"
#include "locations.h"
#include "memory_region.h"
#include "nodes.h"
#include "optimizing_compiler_stats.h"
#include "stack_map_stream.h"
#include "utils/label.h"

namespace art {

// Binary encoding of 2^32 for type double.
static int64_t constexpr k2Pow32EncodingForDouble = INT64_C(0x41F0000000000000);
// Binary encoding of 2^31 for type double.
static int64_t constexpr k2Pow31EncodingForDouble = INT64_C(0x41E0000000000000);
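// For reference, an IEEE-754 double is (sign, 11-bit biased exponent, 52-bit
// mantissa). For 2^32 the biased exponent is 1023 + 32 = 1055 = 0x41F and the
// mantissa is zero, which concatenates to 0x41F0000000000000; 2^31 likewise
// uses 1023 + 31 = 1054 = 0x41E, giving 0x41E0000000000000.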

// Minimum value for a primitive integer.
static int32_t constexpr kPrimIntMin = 0x80000000;
// Minimum value for a primitive long.
static int64_t constexpr kPrimLongMin = INT64_C(0x8000000000000000);

// Maximum value for a primitive integer.
static int32_t constexpr kPrimIntMax = 0x7fffffff;
// Maximum value for a primitive long.
static int64_t constexpr kPrimLongMax = INT64_C(0x7fffffffffffffff);

class Assembler;
class CodeGenerator;
class CompilerDriver;
class LinkerPatch;
class ParallelMoveResolver;

class CodeAllocator {
 public:
  CodeAllocator() {}
  virtual ~CodeAllocator() {}

  virtual uint8_t* Allocate(size_t size) = 0;

 private:
  DISALLOW_COPY_AND_ASSIGN(CodeAllocator);
};
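
// A concrete allocator might look like the following sketch (an illustration
// only; the class and its `memory_` member are hypothetical, not part of this
// header):
//
//   class VectorCodeAllocator FINAL : public CodeAllocator {
//    public:
//     uint8_t* Allocate(size_t size) OVERRIDE {
//       memory_.resize(size);
//       return &memory_[0];
//     }
//
//     const std::vector<uint8_t>& GetMemory() const { return memory_; }
//
//    private:
//     std::vector<uint8_t> memory_;
//   };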

class SlowPathCode : public ArenaObject<kArenaAllocSlowPaths> {
 public:
  SlowPathCode() {
    for (size_t i = 0; i < kMaximumNumberOfExpectedRegisters; ++i) {
      saved_core_stack_offsets_[i] = kRegisterNotSaved;
      saved_fpu_stack_offsets_[i] = kRegisterNotSaved;
    }
  }

  virtual ~SlowPathCode() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) = 0;

  virtual void SaveLiveRegisters(CodeGenerator* codegen, LocationSummary* locations);
  virtual void RestoreLiveRegisters(CodeGenerator* codegen, LocationSummary* locations);

  bool IsCoreRegisterSaved(int reg) const {
    return saved_core_stack_offsets_[reg] != kRegisterNotSaved;
  }

  bool IsFpuRegisterSaved(int reg) const {
    return saved_fpu_stack_offsets_[reg] != kRegisterNotSaved;
  }

  uint32_t GetStackOffsetOfCoreRegister(int reg) const {
    return saved_core_stack_offsets_[reg];
  }

  uint32_t GetStackOffsetOfFpuRegister(int reg) const {
    return saved_fpu_stack_offsets_[reg];
  }

  virtual bool IsFatal() const { return false; }

  virtual const char* GetDescription() const = 0;

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 protected:
  static constexpr size_t kMaximumNumberOfExpectedRegisters = 32;
  static constexpr uint32_t kRegisterNotSaved = -1;
  uint32_t saved_core_stack_offsets_[kMaximumNumberOfExpectedRegisters];
  uint32_t saved_fpu_stack_offsets_[kMaximumNumberOfExpectedRegisters];

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCode);
};
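
// A backend slow path typically implements EmitNativeCode along these lines
// (a sketch; `__` is the usual backend assembler shorthand, `instruction_` a
// hypothetical member holding the guarded instruction, and the branch
// mnemonic is architecture-specific):
//
//   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
//     __ Bind(GetEntryLabel());
//     SaveLiveRegisters(codegen, instruction_->GetLocations());
//     // ... marshal arguments and call the runtime entrypoint ...
//     RestoreLiveRegisters(codegen, instruction_->GetLocations());
//     // Unconditionally branch back to the fast path.
//     __ b(GetExitLabel());
//   }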

class InvokeDexCallingConventionVisitor {
 public:
  virtual Location GetNextLocation(Primitive::Type type) = 0;
  virtual Location GetReturnLocation(Primitive::Type type) const = 0;
  virtual Location GetMethodLocation() const = 0;

 protected:
  InvokeDexCallingConventionVisitor() {}
  virtual ~InvokeDexCallingConventionVisitor() {}

  // The current index for core registers.
  uint32_t gp_index_ = 0u;
  // The current index for floating-point registers.
  uint32_t float_index_ = 0u;
  // The current stack index.
  uint32_t stack_index_ = 0u;

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitor);
};

class FieldAccessCallingConvention {
 public:
  virtual Location GetObjectLocation() const = 0;
  virtual Location GetFieldIndexLocation() const = 0;
  virtual Location GetReturnLocation(Primitive::Type type) const = 0;
  virtual Location GetSetValueLocation(Primitive::Type type, bool is_instance) const = 0;
  virtual Location GetFpuLocation(Primitive::Type type) const = 0;
  virtual ~FieldAccessCallingConvention() {}

 protected:
  FieldAccessCallingConvention() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConvention);
};

class CodeGenerator {
 public:
  // Compiles the graph to executable instructions.
  void CompileBaseline(CodeAllocator* allocator, bool is_leaf = false);
  void CompileOptimized(CodeAllocator* allocator);
  static CodeGenerator* Create(HGraph* graph,
                               InstructionSet instruction_set,
                               const InstructionSetFeatures& isa_features,
                               const CompilerOptions& compiler_options,
                               OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGenerator() {}

  // Get the graph. This is the outermost graph, never the graph of a method being inlined.
  HGraph* GetGraph() const { return graph_; }

  HBasicBlock* GetNextBlockToEmit() const;
  HBasicBlock* FirstNonEmptyBlock(HBasicBlock* block) const;
  bool GoesToNextBlock(HBasicBlock* current, HBasicBlock* next) const;

  size_t GetStackSlotOfParameter(HParameterValue* parameter) const {
    // Note that this follows the current calling convention.
    return GetFrameSize()
        + InstructionSetPointerSize(GetInstructionSet())  // Art method
        + parameter->GetIndex() * kVRegSize;
  }
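
  // For example, on a 32-bit target with a 112-byte frame, the parameter at
  // index 1 lives at SP + 112 (skip our own frame) + 4 (the stack slot
  // reserved for the method pointer in the caller's out-going area)
  // + 1 * kVRegSize = SP + 120 (illustrative numbers; kVRegSize is 4).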

  virtual void Initialize() = 0;
  virtual void Finalize(CodeAllocator* allocator);
  virtual void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches);
  virtual void GenerateFrameEntry() = 0;
  virtual void GenerateFrameExit() = 0;
  virtual void Bind(HBasicBlock* block) = 0;
  virtual void Move(HInstruction* instruction, Location location, HInstruction* move_for) = 0;
  virtual void MoveConstant(Location destination, int32_t value) = 0;
  virtual void MoveLocation(Location dst, Location src, Primitive::Type dst_type) = 0;
  virtual void AddLocationAsTemp(Location location, LocationSummary* locations) = 0;

  virtual Assembler* GetAssembler() = 0;
  virtual const Assembler& GetAssembler() const = 0;
  virtual size_t GetWordSize() const = 0;
  virtual size_t GetFloatingPointSpillSlotSize() const = 0;
  virtual uintptr_t GetAddressOf(HBasicBlock* block) const = 0;
  void InitializeCodeGeneration(size_t number_of_spill_slots,
                                size_t maximum_number_of_live_core_registers,
                                size_t maximum_number_of_live_fpu_registers,
                                size_t number_of_out_slots,
                                const ArenaVector<HBasicBlock*>& block_order);
  int32_t GetStackSlot(HLocal* local) const;
  Location GetTemporaryLocation(HTemporary* temp) const;

  uint32_t GetFrameSize() const { return frame_size_; }
  void SetFrameSize(uint32_t size) { frame_size_ = size; }
  uint32_t GetCoreSpillMask() const { return core_spill_mask_; }
  uint32_t GetFpuSpillMask() const { return fpu_spill_mask_; }

  size_t GetNumberOfCoreRegisters() const { return number_of_core_registers_; }
  size_t GetNumberOfFloatingPointRegisters() const { return number_of_fpu_registers_; }
  virtual void SetupBlockedRegisters(bool is_baseline) const = 0;

  virtual void ComputeSpillMask() {
    core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
    DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
    fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
  }

  static uint32_t ComputeRegisterMask(const int* registers, size_t length) {
    uint32_t mask = 0;
    for (size_t i = 0, e = length; i < e; ++i) {
      mask |= (1 << registers[i]);
    }
    return mask;
  }
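
  // For instance, registers {0, 1, 4} yield
  // (1 << 0) | (1 << 1) | (1 << 4) = 0b10011 = 0x13:
  //
  //   static constexpr int kRegs[] = { 0, 1, 4 };
  //   uint32_t mask = ComputeRegisterMask(kRegs, arraysize(kRegs));  // 0x13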

  virtual void DumpCoreRegister(std::ostream& stream, int reg) const = 0;
  virtual void DumpFloatingPointRegister(std::ostream& stream, int reg) const = 0;
  virtual InstructionSet GetInstructionSet() const = 0;

  const CompilerOptions& GetCompilerOptions() const { return compiler_options_; }

  void MaybeRecordStat(MethodCompilationStat compilation_stat, size_t count = 1) const;

  // Saves the register in the stack. Returns the size taken on stack.
  virtual size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) = 0;
  // Restores the register from the stack. Returns the size taken on stack.
  virtual size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) = 0;

  virtual size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) = 0;
  virtual size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) = 0;

  virtual bool NeedsTwoRegisters(Primitive::Type type) const = 0;
  // Returns whether we should split long moves in parallel moves.
  virtual bool ShouldSplitLongMoves() const { return false; }

  size_t GetNumberOfCoreCalleeSaveRegisters() const {
    return POPCOUNT(core_callee_save_mask_);
  }

  size_t GetNumberOfCoreCallerSaveRegisters() const {
    DCHECK_GE(GetNumberOfCoreRegisters(), GetNumberOfCoreCalleeSaveRegisters());
    return GetNumberOfCoreRegisters() - GetNumberOfCoreCalleeSaveRegisters();
  }

  bool IsCoreCalleeSaveRegister(int reg) const {
    return (core_callee_save_mask_ & (1 << reg)) != 0;
  }

  bool IsFloatingPointCalleeSaveRegister(int reg) const {
    return (fpu_callee_save_mask_ & (1 << reg)) != 0;
  }

  // Record native to dex mapping for a suspend point. Required by runtime.
  void RecordPcInfo(HInstruction* instruction, uint32_t dex_pc, SlowPathCode* slow_path = nullptr);

  bool CanMoveNullCheckToUser(HNullCheck* null_check);
  void MaybeRecordImplicitNullCheck(HInstruction* instruction);

  // Records a stack map which the runtime might use to set catch phi values
  // during exception delivery.
  // TODO: Replace with a catch-entering instruction that records the environment.
  void RecordCatchBlockInfo();

  // Returns true if implicit null checks are allowed in the compiler options
  // and if the null check is not inside a try block. We currently cannot do
  // implicit null checks in that case because we need the NullCheckSlowPath to
  // save live registers, which may be needed by the runtime to set catch phis.
  bool IsImplicitNullCheckAllowed(HNullCheck* null_check) const;

  void AddSlowPath(SlowPathCode* slow_path) {
    slow_paths_.push_back(slow_path);
  }

  void BuildMappingTable(ArenaVector<uint8_t>* vector) const;
  void BuildVMapTable(ArenaVector<uint8_t>* vector) const;
  void BuildNativeGCMap(
      ArenaVector<uint8_t>* vector, const CompilerDriver& compiler_driver) const;
  void BuildStackMaps(MemoryRegion region);
  size_t ComputeStackMapsSize();

  bool IsBaseline() const {
    return is_baseline_;
  }

  bool IsLeafMethod() const {
    return is_leaf_;
  }

  void MarkNotLeaf() {
    is_leaf_ = false;
    requires_current_method_ = true;
  }

  void SetRequiresCurrentMethod() {
    requires_current_method_ = true;
  }

  bool RequiresCurrentMethod() const {
    return requires_current_method_;
  }

  // Clears the spill slots taken by loop phis in the `LocationSummary` of the
  // suspend check. This is called when the code generator generates code
  // for the suspend check at the back edge (instead of where the suspend check
  // is, which is the loop entry). At this point, the spill slots for the phis
  // have not been written to.
  void ClearSpillSlotsFromLoopPhisInStackMap(HSuspendCheck* suspend_check) const;

  bool* GetBlockedCoreRegisters() const { return blocked_core_registers_; }
  bool* GetBlockedFloatingPointRegisters() const { return blocked_fpu_registers_; }

  // Helper that returns the pointer offset of an index in an object array.
  // Note: this method assumes we always have the same pointer size, regardless
  // of the architecture.
  static size_t GetCacheOffset(uint32_t index);
  // Pointer variant for ArtMethod and ArtField arrays.
  size_t GetCachePointerOffset(uint32_t index);

  void EmitParallelMoves(Location from1,
                         Location to1,
                         Primitive::Type type1,
                         Location from2,
                         Location to2,
                         Primitive::Type type2);

  static bool StoreNeedsWriteBarrier(Primitive::Type type, HInstruction* value) {
    // Check that null value is not represented as an integer constant.
    DCHECK(type != Primitive::kPrimNot || !value->IsIntConstant());
    return type == Primitive::kPrimNot && !value->IsNullConstant();
  }
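
  // In other words, only a store of a non-null reference needs a write
  // barrier (card marking): a null store cannot create a heap reference the
  // garbage collector has to trace, and primitive stores never do. So an
  // HInstanceFieldSet of a reference returns true here unless the stored
  // value is the null constant.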

  void ValidateInvokeRuntime(HInstruction* instruction, SlowPathCode* slow_path);

  void AddAllocatedRegister(Location location) {
    allocated_registers_.Add(location);
  }

  bool HasAllocatedRegister(bool is_core, int reg) const {
    return is_core
        ? allocated_registers_.ContainsCoreRegister(reg)
        : allocated_registers_.ContainsFloatingPointRegister(reg);
  }

  void AllocateLocations(HInstruction* instruction);

  // Tells whether the stack frame of the compiled method is
  // considered "empty", that is either actually having a size of zero,
  // or just containing the saved return address register.
  bool HasEmptyFrame() const {
    return GetFrameSize() == (CallPushesPC() ? GetWordSize() : 0);
  }
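
  // On x86/x86-64 the call instruction itself pushes the return PC, so even
  // an "empty" frame has size GetWordSize(); on architectures with a link
  // register (e.g. ARM, MIPS) an empty frame has size zero. See
  // CallPushesPC() below.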

  static int32_t GetInt32ValueOf(HConstant* constant) {
    if (constant->IsIntConstant()) {
      return constant->AsIntConstant()->GetValue();
    } else if (constant->IsNullConstant()) {
      return 0;
    } else {
      DCHECK(constant->IsFloatConstant());
      return bit_cast<int32_t, float>(constant->AsFloatConstant()->GetValue());
    }
  }

  static int64_t GetInt64ValueOf(HConstant* constant) {
    if (constant->IsIntConstant()) {
      return constant->AsIntConstant()->GetValue();
    } else if (constant->IsNullConstant()) {
      return 0;
    } else if (constant->IsFloatConstant()) {
      return bit_cast<int32_t, float>(constant->AsFloatConstant()->GetValue());
    } else if (constant->IsLongConstant()) {
      return constant->AsLongConstant()->GetValue();
    } else {
      DCHECK(constant->IsDoubleConstant());
      return bit_cast<int64_t, double>(constant->AsDoubleConstant()->GetValue());
    }
  }
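
  // Note that both helpers reinterpret floating-point constants bit-for-bit
  // rather than converting them numerically: GetInt32ValueOf on a float
  // constant holding 1.0f yields 0x3F800000, not 1.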

  size_t GetFirstRegisterSlotInSlowPath() const {
    return first_register_slot_in_slow_path_;
  }

  uint32_t FrameEntrySpillSize() const {
    return GetFpuSpillSize() + GetCoreSpillSize();
  }

  virtual ParallelMoveResolver* GetMoveResolver() = 0;

  static void CreateCommonInvokeLocationSummary(
      HInvoke* invoke, InvokeDexCallingConventionVisitor* visitor);

  void GenerateInvokeUnresolvedRuntimeCall(HInvokeUnresolved* invoke);

  void CreateUnresolvedFieldLocationSummary(
      HInstruction* field_access,
      Primitive::Type field_type,
      const FieldAccessCallingConvention& calling_convention);

  void GenerateUnresolvedFieldAccess(
      HInstruction* field_access,
      Primitive::Type field_type,
      uint32_t field_index,
      uint32_t dex_pc,
      const FieldAccessCallingConvention& calling_convention);

  // TODO: This overlaps a bit with MoveFromReturnRegister. Refactor for a better design.
  static void CreateLoadClassLocationSummary(HLoadClass* cls,
                                             Location runtime_type_index_location,
                                             Location runtime_return_location,
                                             bool code_generator_supports_read_barrier = false);

  static void CreateSystemArrayCopyLocationSummary(HInvoke* invoke);

  void SetDisassemblyInformation(DisassemblyInformation* info) { disasm_info_ = info; }
  DisassemblyInformation* GetDisassemblyInformation() const { return disasm_info_; }

  virtual void InvokeRuntime(QuickEntrypointEnum entrypoint,
                             HInstruction* instruction,
                             uint32_t dex_pc,
                             SlowPathCode* slow_path) = 0;

  // Check whether the desired_dispatch_info is supported. If it is, return it;
  // otherwise return a fall-back info that should be used instead.
  virtual HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      MethodReference target_method) = 0;

  // Generate a call to a static or direct method.
  virtual void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) = 0;
  // Generate a call to a virtual method.
  virtual void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) = 0;

  // Copy the result of a call into the given target.
  virtual void MoveFromReturnRegister(Location trg, Primitive::Type type) = 0;

 protected:
  // Method patch info used for recording locations of required linker patches and
  // target methods. The target method can be used for various purposes, whether for
  // patching the address of the method or the code pointer or a PC-relative call.
  template <typename LabelType>
  struct MethodPatchInfo {
    explicit MethodPatchInfo(MethodReference m) : target_method(m), label() { }

    MethodReference target_method;
    LabelType label;
  };
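
  // A backend typically collects one of these per patch site and later
  // reports them through EmitLinkerPatches(). A sketch (the container member
  // is hypothetical):
  //
  //   ArenaDeque<MethodPatchInfo<Label>> method_patches_;
  //   ...
  //   method_patches_.emplace_back(target_method);
  //   __ Bind(&method_patches_.back().label);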

  CodeGenerator(HGraph* graph,
                size_t number_of_core_registers,
                size_t number_of_fpu_registers,
                size_t number_of_register_pairs,
                uint32_t core_callee_save_mask,
                uint32_t fpu_callee_save_mask,
                const CompilerOptions& compiler_options,
                OptimizingCompilerStats* stats)
      : frame_size_(0),
        core_spill_mask_(0),
        fpu_spill_mask_(0),
        first_register_slot_in_slow_path_(0),
        blocked_core_registers_(graph->GetArena()->AllocArray<bool>(number_of_core_registers,
                                                                    kArenaAllocCodeGenerator)),
        blocked_fpu_registers_(graph->GetArena()->AllocArray<bool>(number_of_fpu_registers,
                                                                   kArenaAllocCodeGenerator)),
        blocked_register_pairs_(graph->GetArena()->AllocArray<bool>(number_of_register_pairs,
                                                                    kArenaAllocCodeGenerator)),
        number_of_core_registers_(number_of_core_registers),
        number_of_fpu_registers_(number_of_fpu_registers),
        number_of_register_pairs_(number_of_register_pairs),
        core_callee_save_mask_(core_callee_save_mask),
        fpu_callee_save_mask_(fpu_callee_save_mask),
        stack_map_stream_(graph->GetArena()),
        block_order_(nullptr),
        is_baseline_(false),
        disasm_info_(nullptr),
        stats_(stats),
        graph_(graph),
        compiler_options_(compiler_options),
        slow_paths_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
        current_slow_path_(nullptr),
        current_block_index_(0),
        is_leaf_(true),
        requires_current_method_(false) {
    slow_paths_.reserve(8);
  }

  // Register allocation logic.
  void AllocateRegistersLocally(HInstruction* instruction) const;

  // Backend specific implementation for allocating a register.
  virtual Location AllocateFreeRegister(Primitive::Type type) const = 0;

  static size_t FindFreeEntry(bool* array, size_t length);
  static size_t FindTwoFreeConsecutiveAlignedEntries(bool* array, size_t length);

  virtual Location GetStackLocation(HLoadLocal* load) const = 0;

  virtual HGraphVisitor* GetLocationBuilder() = 0;
  virtual HGraphVisitor* GetInstructionVisitor() = 0;

  // Returns the location of the first spilled entry for floating point registers,
  // relative to the stack pointer.
  uint32_t GetFpuSpillStart() const {
    return GetFrameSize() - FrameEntrySpillSize();
  }

  uint32_t GetFpuSpillSize() const {
    return POPCOUNT(fpu_spill_mask_) * GetFloatingPointSpillSlotSize();
  }

  uint32_t GetCoreSpillSize() const {
    return POPCOUNT(core_spill_mask_) * GetWordSize();
  }
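
  // Illustrative frame accounting: with frame_size_ = 64, three core
  // registers spilled on a 32-bit target (GetCoreSpillSize() = 12) and two
  // 8-byte FPU spill slots (GetFpuSpillSize() = 16), FrameEntrySpillSize()
  // = 28 and the FPU spills start at SP + 64 - 28 = SP + 36, below the core
  // spills at the top of the frame.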

  bool HasAllocatedCalleeSaveRegisters() const {
    // We compare the core spill count against 1 because the core spill mask
    // always includes the return PC.
    return (POPCOUNT(allocated_registers_.GetCoreRegisters() & core_callee_save_mask_) != 1)
        || (POPCOUNT(allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_) != 0);
  }

  bool CallPushesPC() const {
    InstructionSet instruction_set = GetInstructionSet();
    return instruction_set == kX86 || instruction_set == kX86_64;
  }

  // Arm64 has its own type for a label, so we need to templatize these methods
  // to share the logic.

  template <typename LabelType>
  LabelType* CommonInitializeLabels() {
    // We use raw array allocations instead of ArenaVector<> because Labels are
    // non-constructible and non-movable and as such cannot be held in a vector.
    size_t size = GetGraph()->GetBlocks().size();
    LabelType* labels = GetGraph()->GetArena()->AllocArray<LabelType>(size,
                                                                      kArenaAllocCodeGenerator);
    for (size_t i = 0; i != size; ++i) {
      new(labels + i) LabelType();
    }
    return labels;
  }
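
  // Backends typically call this from their Initialize() override, e.g.
  // (a sketch; `block_labels_` is a hypothetical backend member):
  //
  //   void Initialize() OVERRIDE {
  //     block_labels_ = CommonInitializeLabels<Label>();
  //   }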

  template <typename LabelType>
  LabelType* CommonGetLabelOf(LabelType* raw_pointer_to_labels_array, HBasicBlock* block) const {
    block = FirstNonEmptyBlock(block);
    return raw_pointer_to_labels_array + block->GetBlockId();
  }

  SlowPathCode* GetCurrentSlowPath() {
    return current_slow_path_;
  }

  // Frame size required for this method.
  uint32_t frame_size_;
  uint32_t core_spill_mask_;
  uint32_t fpu_spill_mask_;
  uint32_t first_register_slot_in_slow_path_;

  // Registers that were allocated during linear scan.
  RegisterSet allocated_registers_;

  // Arrays used when doing register allocation to know which
  // registers we can allocate. `SetupBlockedRegisters` updates the
  // arrays.
  bool* const blocked_core_registers_;
  bool* const blocked_fpu_registers_;
  bool* const blocked_register_pairs_;
  size_t number_of_core_registers_;
  size_t number_of_fpu_registers_;
  size_t number_of_register_pairs_;
  const uint32_t core_callee_save_mask_;
  const uint32_t fpu_callee_save_mask_;

  StackMapStream stack_map_stream_;

  // The order to use for code generation.
  const ArenaVector<HBasicBlock*>* block_order_;

  // Whether we are using baseline.
  bool is_baseline_;

  DisassemblyInformation* disasm_info_;

 private:
  void InitLocationsBaseline(HInstruction* instruction);
  size_t GetStackOffsetOfSavedRegister(size_t index);
  void GenerateSlowPaths();
  void CompileInternal(CodeAllocator* allocator, bool is_baseline);
  void BlockIfInRegister(Location location, bool is_out = false) const;
  void EmitEnvironment(HEnvironment* environment, SlowPathCode* slow_path);

  OptimizingCompilerStats* stats_;

  HGraph* const graph_;
  const CompilerOptions& compiler_options_;

  ArenaVector<SlowPathCode*> slow_paths_;

  // The current slow path that we're generating code for.
  SlowPathCode* current_slow_path_;

  // The current block index in `block_order_` of the block
  // we are generating code for.
  size_t current_block_index_;

  // Whether the method is a leaf method.
  bool is_leaf_;

  // Whether an instruction in the graph accesses the current method.
  bool requires_current_method_;

  friend class OptimizingCFITest;

  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};

template <typename C, typename F>
class CallingConvention {
 public:
  CallingConvention(const C* registers,
                    size_t number_of_registers,
                    const F* fpu_registers,
                    size_t number_of_fpu_registers,
                    size_t pointer_size)
      : registers_(registers),
        number_of_registers_(number_of_registers),
        fpu_registers_(fpu_registers),
        number_of_fpu_registers_(number_of_fpu_registers),
        pointer_size_(pointer_size) {}

  size_t GetNumberOfRegisters() const { return number_of_registers_; }
  size_t GetNumberOfFpuRegisters() const { return number_of_fpu_registers_; }

  C GetRegisterAt(size_t index) const {
    DCHECK_LT(index, number_of_registers_);
    return registers_[index];
  }

  F GetFpuRegisterAt(size_t index) const {
    DCHECK_LT(index, number_of_fpu_registers_);
    return fpu_registers_[index];
  }

  size_t GetStackOffsetOf(size_t index) const {
    // We still reserve the space for parameters passed by registers.
    // Add space for the method pointer.
    return pointer_size_ + index * kVRegSize;
  }
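
  // For example, with a 4-byte method pointer and 4-byte vregs, the stack
  // slot reserved for the argument at index 2 is at offset 4 + 2 * 4 = 12.
  // Note that indices below GetNumberOfRegisters() still get a slot, even
  // though those arguments travel in registers.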

 private:
  const C* registers_;
  const size_t number_of_registers_;
  const F* fpu_registers_;
  const size_t number_of_fpu_registers_;
  const size_t pointer_size_;

  DISALLOW_COPY_AND_ASSIGN(CallingConvention);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_H_