/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_

#include "code_generator.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "type_reference.h"
#include "utils/mips64/assembler_mips64.h"

namespace art {
namespace mips64 {

// InvokeDexCallingConvention registers

static constexpr GpuRegister kParameterCoreRegisters[] =
    { A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

static constexpr FpuRegister kParameterFpuRegisters[] =
    { F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kParameterFpuRegistersLength = arraysize(kParameterFpuRegisters);


// InvokeRuntimeCallingConvention registers

static constexpr GpuRegister kRuntimeParameterCoreRegisters[] =
    { A0, A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

static constexpr FpuRegister kRuntimeParameterFpuRegisters[] =
    { F12, F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);


static constexpr GpuRegister kCoreCalleeSaves[] =
    { S0, S1, S2, S3, S4, S5, S6, S7, GP, S8, RA };
static constexpr FpuRegister kFpuCalleeSaves[] =
    { F24, F25, F26, F27, F28, F29, F30, F31 };


class CodeGeneratorMIPS64;

VectorRegister VectorRegisterFrom(Location location);

class InvokeDexCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeDexCallingConvention()
      : CallingConvention(kParameterCoreRegisters,
                          kParameterCoreRegistersLength,
                          kParameterFpuRegisters,
                          kParameterFpuRegistersLength,
                          kMips64PointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};

class InvokeDexCallingConventionVisitorMIPS64 : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorMIPS64() {}
  virtual ~InvokeDexCallingConventionVisitorMIPS64() {}

  Location GetNextLocation(Primitive::Type type) OVERRIDE;
  Location GetReturnLocation(Primitive::Type type) const OVERRIDE;
  Location GetMethodLocation() const OVERRIDE;

 private:
  InvokeDexCallingConvention calling_convention;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorMIPS64);
};

class InvokeRuntimeCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength,
                          kMips64PointerSize) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

class FieldAccessCallingConventionMIPS64 : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionMIPS64() {}

  Location GetObjectLocation() const OVERRIDE {
    return Location::RegisterLocation(A1);
  }
  Location GetFieldIndexLocation() const OVERRIDE {
    return Location::RegisterLocation(A0);
  }
  Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::RegisterLocation(V0);
  }
  Location GetSetValueLocation(Primitive::Type type ATTRIBUTE_UNUSED,
                               bool is_instance) const OVERRIDE {
    return is_instance
        ? Location::RegisterLocation(A2)
        : Location::RegisterLocation(A1);
  }
  Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::FpuRegisterLocation(F0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionMIPS64);
};

class ParallelMoveResolverMIPS64 : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverMIPS64(ArenaAllocator* allocator, CodeGeneratorMIPS64* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  void EmitMove(size_t index) OVERRIDE;
  void EmitSwap(size_t index) OVERRIDE;
  void SpillScratch(int reg) OVERRIDE;
  void RestoreScratch(int reg) OVERRIDE;

  void Exchange(int index1, int index2, bool double_slot);

  Mips64Assembler* GetAssembler() const;

 private:
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverMIPS64);
};

class SlowPathCodeMIPS64 : public SlowPathCode {
 public:
  explicit SlowPathCodeMIPS64(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  Mips64Label* GetEntryLabel() { return &entry_label_; }
  Mips64Label* GetExitLabel() { return &exit_label_; }

 private:
  Mips64Label entry_label_;
  Mips64Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeMIPS64);
};

class LocationsBuilderMIPS64 : public HGraphVisitor {
 public:
  LocationsBuilderMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleInvoke(HInvoke* invoke);
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
  Location RegisterOrZeroConstant(HInstruction* instruction);
  Location FpuRegisterOrConstantForStore(HInstruction* instruction);

  InvokeDexCallingConventionVisitorMIPS64 parameter_visitor_;

  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderMIPS64);
};

class InstructionCodeGeneratorMIPS64 : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super) \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  Mips64Assembler* GetAssembler() const { return assembler_; }

  // Compare-and-jump packed switch generates approx. 3 + 2.5 * N 32-bit
  // instructions for N cases.
  // Table-based packed switch generates approx. 11 32-bit instructions
  // and N 32-bit data words for N cases.
  // At N = 6 they come out as 18 and 17 32-bit words respectively.
  // We switch to the table-based method starting with 7 cases.
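  // As a worked example of these estimates, at N = 7 the compare-and-jump form costs about
  // 3 + 2.5 * 7 = 20.5 (roughly 21) 32-bit words, while the table-based form costs
  // 11 + 7 = 18 words, so the table is the cheaper choice from 7 cases onward.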
  static constexpr uint32_t kPackedSwitchJumpTableThreshold = 6;

  void GenerateMemoryBarrier(MemBarrierKind kind);

 private:
  void GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path, GpuRegister class_reg);
  void GenerateSuspendCheck(HSuspendCheck* check, HBasicBlock* successor);
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  // Generate a heap reference load using one register `out`:
  //
  //   out <- *(out + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a read barrier and
  // shall be a register in that case; it may be an invalid location
  // otherwise.
  void GenerateReferenceLoadOneRegister(HInstruction* instruction,
                                        Location out,
                                        uint32_t offset,
                                        Location maybe_temp,
                                        ReadBarrierOption read_barrier_option);
  // Generate a heap reference load using two different registers
  // `out` and `obj`:
  //
  //   out <- *(obj + offset)
  //
  // while honoring heap poisoning and/or read barriers (if any).
  //
  // Location `maybe_temp` is used when generating a Baker's (fast
  // path) read barrier and shall be a register in that case; it may
  // be an invalid location otherwise.
  void GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
                                         Location out,
                                         Location obj,
                                         uint32_t offset,
                                         Location maybe_temp,
                                         ReadBarrierOption read_barrier_option);

  // Generate a GC root reference load:
  //
  //   root <- *(obj + offset)
  //
  // while honoring read barriers (if any).
  void GenerateGcRootFieldLoad(HInstruction* instruction,
                               Location root,
                               GpuRegister obj,
                               uint32_t offset,
                               ReadBarrierOption read_barrier_option,
                               Mips64Label* label_low = nullptr);

  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             Mips64Label* true_target,
                             Mips64Label* false_target);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemIntegral(HBinaryOperation* instruction);
  void GenerateIntLongCompare(IfCondition cond, bool is64bit, LocationSummary* locations);
  // When the function returns `false` it means that the condition holds if `dst` is non-zero
  // and doesn't hold if `dst` is zero. If it returns `true`, the roles of zero and non-zero
  // `dst` are exchanged.
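  // For example, if a caller materializes `a == b` and this function returns `true`, the value
  // left in `dst` is zero exactly when `a == b` holds, so the caller must invert the usual
  // interpretation of `dst` (where non-zero would mean "condition holds").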
  bool MaterializeIntLongCompare(IfCondition cond,
                                 bool is64bit,
                                 LocationSummary* input_locations,
                                 GpuRegister dst);
  void GenerateIntLongCompareAndBranch(IfCondition cond,
                                       bool is64bit,
                                       LocationSummary* locations,
                                       Mips64Label* label);
  void GenerateFpCompare(IfCondition cond,
                         bool gt_bias,
                         Primitive::Type type,
                         LocationSummary* locations);
  // When the function returns `false` it means that the condition holds if `dst` is non-zero
  // and doesn't hold if `dst` is zero. If it returns `true`, the roles of zero and non-zero
  // `dst` are exchanged.
  bool MaterializeFpCompare(IfCondition cond,
                            bool gt_bias,
                            Primitive::Type type,
                            LocationSummary* input_locations,
                            FpuRegister dst);
  void GenerateFpCompareAndBranch(IfCondition cond,
                                  bool gt_bias,
                                  Primitive::Type type,
                                  LocationSummary* locations,
                                  Mips64Label* label);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);
  void GenPackedSwitchWithCompares(GpuRegister value_reg,
                                   int32_t lower_bound,
                                   uint32_t num_entries,
                                   HBasicBlock* switch_block,
                                   HBasicBlock* default_block);
  void GenTableBasedPackedSwitch(GpuRegister value_reg,
                                 int32_t lower_bound,
                                 uint32_t num_entries,
                                 HBasicBlock* switch_block,
                                 HBasicBlock* default_block);
  int32_t VecAddress(LocationSummary* locations,
                     size_t size,
                     /* out */ GpuRegister* adjusted_base);
  void GenConditionalMove(HSelect* select);

  Mips64Assembler* const assembler_;
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorMIPS64);
};

class CodeGeneratorMIPS64 : public CodeGenerator {
 public:
  CodeGeneratorMIPS64(HGraph* graph,
                      const Mips64InstructionSetFeatures& isa_features,
                      const CompilerOptions& compiler_options,
                      OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorMIPS64() {}

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;

  void Bind(HBasicBlock* block) OVERRIDE;

  size_t GetWordSize() const OVERRIDE { return kMips64DoublewordSize; }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE {
    return GetGraph()->HasSIMD()
        ? 2 * kMips64DoublewordSize   // 16 bytes for each spill.
        : 1 * kMips64DoublewordSize;  //  8 bytes for each spill.
  }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    return assembler_.GetLabelLocation(GetLabelOf(block));
  }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }
  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }
  Mips64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
  const Mips64Assembler& GetAssembler() const OVERRIDE { return assembler_; }

  // Emit linker patches.
  void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
  void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;

  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference field load when Baker's read barriers are used.
  void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             GpuRegister obj,
                                             uint32_t offset,
                                             Location temp,
                                             bool needs_null_check);
  // Fast path implementation of ReadBarrier::Barrier for a heap
  // reference array load when Baker's read barriers are used.
  void GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                             Location ref,
                                             GpuRegister obj,
                                             uint32_t data_offset,
                                             Location index,
                                             Location temp,
                                             bool needs_null_check);

  // Factored implementation, used by GenerateFieldLoadWithBakerReadBarrier,
  // GenerateArrayLoadWithBakerReadBarrier and some intrinsics.
  //
  // Load the object reference located at the address
  // `obj + offset + (index << scale_factor)`, held by object `obj`, into
  // `ref`, and mark it if needed.
  //
  // If `always_update_field` is true, the value of the reference is
  // atomically updated in the holder (`obj`).
  void GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                 Location ref,
                                                 GpuRegister obj,
                                                 uint32_t offset,
                                                 Location index,
                                                 ScaleFactor scale_factor,
                                                 Location temp,
                                                 bool needs_null_check,
                                                 bool always_update_field = false);

  // Generate a read barrier for a heap reference within `instruction`
  // using a slow path.
  //
  // A read barrier for an object reference read from the heap is
  // implemented as a call to the artReadBarrierSlow runtime entry
  // point, which is passed the values in locations `ref`, `obj`, and
  // `offset`:
  //
  //   mirror::Object* artReadBarrierSlow(mirror::Object* ref,
  //                                      mirror::Object* obj,
  //                                      uint32_t offset);
  //
  // The `out` location contains the value returned by
  // artReadBarrierSlow.
  //
  // When `index` is provided (i.e. for array accesses), the offset
  // value passed to artReadBarrierSlow is adjusted to take `index`
  // into account.
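  // (For example, for an array element the offset handed to the runtime is, in effect, the
  // array's data offset plus the scaled `index`; the exact adjustment lives in the .cc file.)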
  void GenerateReadBarrierSlow(HInstruction* instruction,
                               Location out,
                               Location ref,
                               Location obj,
                               uint32_t offset,
                               Location index = Location::NoLocation());

  // If read barriers are enabled, generate a read barrier for a heap
  // reference using a slow path. If heap poisoning is enabled, also
  // unpoison the reference in `out`.
  void MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                    Location out,
                                    Location ref,
                                    Location obj,
                                    uint32_t offset,
                                    Location index = Location::NoLocation());

  // Generate a read barrier for a GC root within `instruction` using
  // a slow path.
  //
  // A read barrier for an object reference GC root is implemented as
  // a call to the artReadBarrierForRootSlow runtime entry point,
  // which is passed the value in location `root`:
  //
  //   mirror::Object* artReadBarrierForRootSlow(GcRoot<mirror::Object>* root);
  //
  // The `out` location contains the value returned by
  // artReadBarrierForRootSlow.
  void GenerateReadBarrierForRootSlow(HInstruction* instruction, Location out, Location root);

  void MarkGCCard(GpuRegister object, GpuRegister value, bool value_can_be_null);

  // Register allocation.

  void SetupBlockedRegisters() const OVERRIDE;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) OVERRIDE;

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  InstructionSet GetInstructionSet() const OVERRIDE { return InstructionSet::kMips64; }

  const Mips64InstructionSetFeatures& GetInstructionSetFeatures() const {
    return isa_features_;
  }

  Mips64Label* GetLabelOf(HBasicBlock* block) const {
    return CommonGetLabelOf<Mips64Label>(block_labels_, block);
  }

  void Initialize() OVERRIDE {
    block_labels_ = CommonInitializeLabels<Mips64Label>();
  }

  // We prefer aligned loads and stores (less code), so spill and restore registers in slow paths
  // at aligned locations.
  uint32_t GetPreferredSlotsAlignment() const OVERRIDE { return kMips64DoublewordSize; }

  void Finalize(CodeAllocator* allocator) OVERRIDE;

  // Code generation helpers.
  void MoveLocation(Location dst, Location src, Primitive::Type dst_type) OVERRIDE;

  void MoveConstant(Location destination, int32_t value) OVERRIDE;

  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;


  void SwapLocations(Location loc1, Location loc2, Primitive::Type type);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path = nullptr) OVERRIDE;

  // Generate code to invoke a runtime entry point, but do not record
  // PC-related information in a stack map.
  void InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                           HInstruction* instruction,
                                           SlowPathCode* slow_path);

  void GenerateInvokeRuntime(int32_t entry_point_offset);

  ParallelMoveResolver* GetMoveResolver() OVERRIDE { return &move_resolver_; }

  bool NeedsTwoRegisters(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE { return false; }

  // Check if the desired_string_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_class_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadClass::LoadKind GetSupportedLoadClassKind(
      HLoadClass::LoadKind desired_class_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it,
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke) OVERRIDE;

  void GenerateStaticOrDirectCall(
      HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;
  void GenerateVirtualCall(
      HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;

  void MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                              Primitive::Type type ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Not implemented on MIPS64";
  }

  void GenerateNop() OVERRIDE;
  void GenerateImplicitNullCheck(HNullCheck* instruction) OVERRIDE;
  void GenerateExplicitNullCheck(HNullCheck* instruction) OVERRIDE;

  // The PcRelativePatchInfo is used for PC-relative addressing of dex cache arrays,
  // boot image strings and method calls. The only difference between those uses is the
  // interpretation of the offset_or_index.
  // The 16-bit halves of the 32-bit PC-relative offset are patched separately, necessitating
  // two patches/infos. There can be more than two patches/infos if the instruction supplying
  // the high half is shared with e.g. a slow path, while the low half is supplied by separate
  // instructions, e.g.:
  //     auipc r1, high       // patch
  //     lwu   r2, low(r1)    // patch
  //     beqzc r2, slow_path
  //   back:
  //     ...
  //   slow_path:
  //     ...
  //     sw    r2, low(r1)    // patch
  //     bc    back
  struct PcRelativePatchInfo {
    PcRelativePatchInfo(const DexFile& dex_file,
                        uint32_t off_or_idx,
                        const PcRelativePatchInfo* info_high)
        : target_dex_file(dex_file),
          offset_or_index(off_or_idx),
          label(),
          patch_info_high(info_high) { }

    const DexFile& target_dex_file;
    // Either the dex cache array element offset or the string/type/method index.
    uint32_t offset_or_index;
    // Label for the instruction to patch.
    Mips64Label label;
    // Pointer to the info for the high half patch or nullptr if this is the high half patch info.
    const PcRelativePatchInfo* patch_info_high;

   private:
    PcRelativePatchInfo(PcRelativePatchInfo&& other) = delete;
    DISALLOW_COPY_AND_ASSIGN(PcRelativePatchInfo);
  };

  PcRelativePatchInfo* NewPcRelativeMethodPatch(MethodReference target_method,
                                                const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewMethodBssEntryPatch(MethodReference target_method,
                                              const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file,
                                              dex::TypeIndex type_index,
                                              const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file,
                                            dex::TypeIndex type_index,
                                            const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
                                                dex::StringIndex string_index,
                                                const PcRelativePatchInfo* info_high = nullptr);
  PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
                                              dex::StringIndex string_index,
                                              const PcRelativePatchInfo* info_high = nullptr);
  Literal* DeduplicateBootImageAddressLiteral(uint64_t address);

  void EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                            GpuRegister out,
                                            PcRelativePatchInfo* info_low = nullptr);
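  // Rough usage sketch for the patch helpers above (the exact emission sequence lives in the
  // .cc file; the register choice and the low-half instruction are only illustrative):
  //   PcRelativePatchInfo* info_high = NewPcRelativeTypePatch(dex_file, type_index);
  //   PcRelativePatchInfo* info_low = NewPcRelativeTypePatch(dex_file, type_index, info_high);
  //   EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);  // `auipc` with high half.
  //   // ...the next instruction the caller emits (e.g. a load using the low half) is then
  //   // associated with `info_low` and patched later, as in the PcRelativePatchInfo example.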

  void PatchJitRootUse(uint8_t* code,
                       const uint8_t* roots_data,
                       const Literal* literal,
                       uint64_t index_in_table) const;
  Literal* DeduplicateJitStringLiteral(const DexFile& dex_file,
                                       dex::StringIndex string_index,
                                       Handle<mirror::String> handle);
  Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
                                      dex::TypeIndex type_index,
                                      Handle<mirror::Class> handle);

 private:
  using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, Literal*>;
  using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, Literal*>;
  using StringToLiteralMap = ArenaSafeMap<StringReference,
                                          Literal*,
                                          StringReferenceValueComparator>;
  using TypeToLiteralMap = ArenaSafeMap<TypeReference,
                                        Literal*,
                                        TypeReferenceValueComparator>;

  Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
  Literal* DeduplicateUint64Literal(uint64_t value);

  PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
                                          uint32_t offset_or_index,
                                          const PcRelativePatchInfo* info_high,
                                          ArenaDeque<PcRelativePatchInfo>* patches);

  template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
  void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos,
                                   ArenaVector<linker::LinkerPatch>* linker_patches);

  // Labels for each block that will be compiled.
  Mips64Label* block_labels_;  // Indexed by block id.
  Mips64Label frame_entry_label_;
  LocationsBuilderMIPS64 location_builder_;
  InstructionCodeGeneratorMIPS64 instruction_visitor_;
  ParallelMoveResolverMIPS64 move_resolver_;
  Mips64Assembler assembler_;
  const Mips64InstructionSetFeatures& isa_features_;

  // Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
  Uint32ToLiteralMap uint32_literals_;
  // Deduplication map for 64-bit literals, used for non-patchable method addresses or method
  // code addresses.
  Uint64ToLiteralMap uint64_literals_;
  // PC-relative method patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> pc_relative_method_patches_;
  // PC-relative method patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> method_bss_entry_patches_;
  // PC-relative type patch info for kBootImageLinkTimePcRelative.
  ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
  // PC-relative type patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
  // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
  ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
  // PC-relative String patch info for kBssEntry.
  ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;

  // Patches for string root accesses in JIT compiled code.
  StringToLiteralMap jit_string_patches_;
  // Patches for class root accesses in JIT compiled code.
  TypeToLiteralMap jit_class_patches_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorMIPS64);
};

}  // namespace mips64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_