/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"


using namespace vixl;  // NOLINT(build/namespaces)

#ifdef __
#error "ARM64 Codegen VIXL macro-assembler macro already defined."
#endif


namespace art {

namespace arm64 {

static bool IsFPType(Primitive::Type type) {
  return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
}

// TODO: clean up some of the constant definitions.
static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
static constexpr int kCurrentMethodStackOffset = 0;

namespace {
// Convenience helpers to ease conversion to and from VIXL operands.

int VIXLRegCodeFromART(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == SP) {
    return vixl::kSPRegInternalCode;
  }
  if (code == XZR) {
    return vixl::kZeroRegCode;
  }
  return code;
}
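
// By example (illustrative, not from the original file): ART numbers the
// stack pointer 31 and the zero register 32, while VIXL reserves dedicated
// codes for both. So VIXLRegCodeFromART(5) == 5 (x5/w5 map directly),
// VIXLRegCodeFromART(SP) == vixl::kSPRegInternalCode, and
// VIXLRegCodeFromART(XZR) == vixl::kZeroRegCode. ARTRegCodeFromVIXL below is
// the exact inverse.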

int ARTRegCodeFromVIXL(int code) {
  // TODO: static check?
  DCHECK_EQ(SP, 31);
  DCHECK_EQ(WSP, 31);
  DCHECK_EQ(XZR, 32);
  DCHECK_EQ(WZR, 32);
  if (code == vixl::kSPRegInternalCode) {
    return SP;
  }
  if (code == vixl::kZeroRegCode) {
    return XZR;
  }
  return code;
}

Register XRegisterFrom(Location location) {
  return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register WRegisterFrom(Location location) {
  return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
}

Register RegisterFrom(Location location, Primitive::Type type) {
  DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
  return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
}

Register OutputRegister(HInstruction* instr) {
  return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
}

Register InputRegisterAt(HInstruction* instr, int input_index) {
  return RegisterFrom(instr->GetLocations()->InAt(input_index),
                      instr->InputAt(input_index)->GetType());
}

int64_t Int64ConstantFrom(Location location) {
  HConstant* instr = location.GetConstant();
  return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
                                : instr->AsLongConstant()->GetValue();
}

Operand OperandFrom(Location location, Primitive::Type type) {
  if (location.IsRegister()) {
    return Operand(RegisterFrom(location, type));
  } else {
    return Operand(Int64ConstantFrom(location));
  }
}
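
// Usage sketch (illustrative): OperandFrom lets callers treat a register and
// a constant location uniformly. For an int in a core register it yields a
// W-register operand; for a constant location it yields an immediate, so
// code such as `__ Add(dst, lhs, InputOperandAt(instr, 1))` works for both
// register and constant right-hand sides.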

Operand InputOperandAt(HInstruction* instr, int input_index) {
  return OperandFrom(instr->GetLocations()->InAt(input_index),
                     instr->InputAt(input_index)->GetType());
}

MemOperand StackOperandFrom(Location location) {
  return MemOperand(sp, location.GetStackIndex());
}

MemOperand HeapOperand(const Register& base, Offset offset) {
  // A heap reference must be 32 bits, so it fits in a W register.
  DCHECK(base.IsW());
  return MemOperand(base.X(), offset.SizeValue());
}

MemOperand HeapOperandFrom(Location location, Primitive::Type type, Offset offset) {
  return HeapOperand(RegisterFrom(location, type), offset);
}

Location LocationFrom(const Register& reg) {
  return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
}

}  // namespace

inline Condition ARM64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return eq;
    case kCondNE: return ne;
    case kCondLT: return lt;
    case kCondLE: return le;
    case kCondGT: return gt;
    case kCondGE: return ge;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return nv;  // Unreachable.
}

static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static const FPRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
 public:
  static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
  DCHECK_NE(return_type, Primitive::kPrimVoid);
  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented return type " << return_type;
  }
  return LocationFrom(x0);
}

#define __ reinterpret_cast<Arm64Assembler*>(codegen->GetAssembler())->vixl_masm_->

class SlowPathCodeARM64 : public SlowPathCode {
 public:
  SlowPathCodeARM64() : entry_label_(), exit_label_() {}

  vixl::Label* GetEntryLabel() { return &entry_label_; }
  vixl::Label* GetExitLabel() { return &exit_label_; }

 private:
  vixl::Label entry_label_;
  vixl::Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
};

class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
                                    Location index_location,
                                    Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM64* arm64_codegen = reinterpret_cast<CodeGeneratorARM64*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(0)),
                              index_location_, Primitive::kPrimInt);
    arm64_codegen->MoveHelper(LocationFrom(calling_convention.GetRegisterAt(1)),
                              length_location_, Primitive::kPrimInt);
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowArrayBounds).SizeValue();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
};

class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pThrowNullPointer).Int32Value();
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
};

class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
                                     HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    size_t offset = QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pTestSuspend).SizeValue();
    __ Bind(GetEntryLabel());
    __ Ldr(lr, MemOperand(tr, offset));
    __ Blr(lr);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    __ B(GetReturnLabel());
  }

  vixl::Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  vixl::Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
};

#undef __

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented type " << type;
  }

  if (gp_index_ < calling_convention.GetNumberOfRegisters()) {
    next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_));
    if (type == Primitive::kPrimLong) {
      // Double stack slot reserved on the stack.
      stack_index_++;
    }
  } else {  // Stack.
    if (type == Primitive::kPrimLong) {
      next_location = Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_));
      // Double stack slot reserved on the stack.
      stack_index_++;
    } else {
      next_location = Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_));
    }
  }
  // Move to the next register/stack slot.
  gp_index_++;
  stack_index_++;
  return next_location;
}
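
// Worked example (illustrative, assuming a managed (int, long, int)
// signature): the int takes the first core register and one vreg slot; the
// long takes the next core register but, being 64-bit, also reserves an
// extra vreg slot (the extra stack_index_++ above); the last int takes the
// third register. Once registers run out, a long is assigned a double stack
// slot and an int a single slot at the current stack_index_.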

CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph)
    : CodeGenerator(graph,
                    kNumberOfAllocatableRegisters,
                    kNumberOfAllocatableFloatingPointRegisters,
                    kNumberOfAllocatableRegisterPairs),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this) {}

#define __ reinterpret_cast<Arm64Assembler*>(GetAssembler())->vixl_masm_->

void CodeGeneratorARM64::GenerateFrameEntry() {
  // TODO: Add proper support for the stack overflow check.
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register temp = temps.AcquireX();
  __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
  __ Ldr(temp, MemOperand(temp, 0));
  RecordPcInfo(nullptr, 0);

  CPURegList preserved_regs = GetFramePreservedRegisters();
  int frame_size = GetFrameSize();
  core_spill_mask_ |= preserved_regs.list();

  __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
  __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());

  // Stack layout:
  //   sp[frame_size - 8]        : lr.
  //   ...                       : other preserved registers.
  //   sp[frame_size - regs_size]: first preserved register.
  //   ...                       : reserved frame space.
  //   sp[0]                     : context pointer.
}
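
// Walk-through with assumed numbers (not from the source): with
// frame_size == 64 and preserved_regs == {x19, x20, lr} (24 bytes), the
// pre-indexed Str drops sp by 64 and stores the incoming ArtMethod*
// reference at sp[0]; PokeCPURegList then writes x19, x20 and lr to
// sp[40..63], which is exactly the layout sketched above.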

void CodeGeneratorARM64::GenerateFrameExit() {
  int frame_size = GetFrameSize();
  CPURegList preserved_regs = GetFramePreservedRegisters();
  __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
  __ Drop(frame_size);
}

void CodeGeneratorARM64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorARM64::MoveHelper(Location destination,
                                    Location source,
                                    Primitive::Type type) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    Register dst = RegisterFrom(destination, type);
    if (source.IsRegister()) {
      Register src = RegisterFrom(source, type);
      DCHECK(dst.IsSameSizeAndType(src));
      __ Mov(dst, src);
    } else {
      DCHECK(dst.Is64Bits() || !source.IsDoubleStackSlot());
      __ Ldr(dst, StackOperandFrom(source));
    }
  } else {
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ Str(RegisterFrom(source, type), StackOperandFrom(destination));
    } else {
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = destination.IsDoubleStackSlot() ? temps.AcquireX() : temps.AcquireW();
      __ Ldr(temp, StackOperandFrom(source));
      __ Str(temp, StackOperandFrom(destination));
    }
  }
}
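
// The four move shapes handled above, summarized (illustrative):
//   register -> register : mov
//   stack    -> register : ldr
//   register -> stack    : str
//   stack    -> stack    : ldr into a scratch register, then str
// with an X scratch register for 64-bit (double-slot) values and a W scratch
// register otherwise.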

void CodeGeneratorARM64::Move(HInstruction* instruction,
                              Location location,
                              HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  Primitive::Type type = instruction->GetType();

  if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
    int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
                                                 : instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      Register dst = RegisterFrom(location, type);
      DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
             (instruction->IsLongConstant() && dst.Is64Bits()));
      __ Mov(dst, value);
    } else {
      DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
      UseScratchRegisterScope temps(assembler_.vixl_masm_);
      Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
      __ Mov(temp, value);
      __ Str(temp, StackOperandFrom(location));
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    MoveHelper(location, temp_location, type);
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (type) {
      case Primitive::kPrimNot:
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
        MoveHelper(location, Location::StackSlot(stack_slot), type);
        break;
      case Primitive::kPrimLong:
        MoveHelper(location, Location::DoubleStackSlot(stack_slot), type);
        break;
      default:
        LOG(FATAL) << "Unimplemented type " << type;
    }

  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    MoveHelper(location, locations->Out(), type);
  }
}

size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
  return GetFramePreservedRegistersSize();
}

Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
  Primitive::Type type = load->GetType();
  switch (type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << type;
      break;
    case Primitive::kPrimVoid:
    default:
      LOG(FATAL) << "Unexpected type " << type;
  }
  LOG(FATAL) << "Unreachable";
  return Location::NoLocation();
}

void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
  UseScratchRegisterScope temps(assembler_.vixl_masm_);
  Register card = temps.AcquireX();
  Register temp = temps.AcquireX();
  vixl::Label done;
  __ Cbz(value, &done);
  __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ Strb(card, MemOperand(card, temp));
  __ Bind(&done);
}
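
// Marking logic, spelled out (the card-table details are assumptions about
// the runtime, not stated here): a null `value` needs no mark; otherwise
// `card` holds the card-table base read from the thread register, `temp` is
// the card index object >> kCardShift, and Strb dirties card[temp]. Storing
// the low byte of the base address itself works if the runtime biases the
// card table so that the base's low byte equals the "dirty" value, which
// saves loading a separate constant.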

void CodeGeneratorARM64::SetupBlockedRegisters() const {
  // Block reserved registers:
  //   ip0 (VIXL temporary)
  //   ip1 (VIXL temporary)
  //   xSuspend (Suspend counter)
  //   lr
  // sp is not part of the allocatable registers, so we don't need to block it.
  CPURegList reserved_core_registers = vixl_reserved_core_registers;
  reserved_core_registers.Combine(runtime_reserved_core_registers);
  // TODO: See if we should instead allow allocating but preserve those if used.
  reserved_core_registers.Combine(quick_callee_saved_registers);
  while (!reserved_core_registers.IsEmpty()) {
    blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
  }
}

Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unreachable type " << type;
  }

  if (type == Primitive::kPrimFloat || type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented support for floating-point";
  }

  ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfXRegisters);
  DCHECK_NE(reg, -1);
  blocked_core_registers_[reg] = true;

  if (IsFPType(type)) {
    return Location::FpuRegisterLocation(reg);
  } else {
    return Location::RegisterLocation(reg);
  }
}

void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
}

void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
}

#undef __
#define __ assembler_->vixl_masm_->

InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
                                                             CodeGeneratorARM64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
  M(ArrayGet)                                 \
  M(ArraySet)                                 \
  M(ClinitCheck)                              \
  M(DoubleConstant)                           \
  M(Div)                                      \
  M(DivZeroCheck)                             \
  M(FloatConstant)                            \
  M(InvokeInterface)                          \
  M(LoadClass)                                \
  M(LoadException)                            \
  M(LoadString)                               \
  M(Neg)                                      \
  M(NewArray)                                 \
  M(ParallelMove)                             \
  M(StaticFieldGet)                           \
  M(StaticFieldSet)                           \
  M(Throw)                                    \
  M(TypeCheck)                                \
  M(TypeConversion)                           \

#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode

enum UnimplementedInstructionBreakCode {
#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
  FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
#undef ENUM_UNIMPLEMENTED_INSTRUCTION
};

#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name)                               \
  void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) {                   \
    UNUSED(instr);                                                                    \
    __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name));                               \
  }                                                                                   \
  void LocationsBuilderARM64::Visit##name(H##name* instr) {                           \
    LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
    locations->SetOut(Location::Any());                                               \
  }
FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS

#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE

void LocationsBuilderARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());
  DCHECK_EQ(instr->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented " << instr->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorARM64::HandleAddSub(HBinaryOperation* instr) {
  DCHECK(instr->IsAdd() || instr->IsSub());

  Primitive::Type type = instr->GetType();
  Register dst = OutputRegister(instr);
  Register lhs = InputRegisterAt(instr, 0);
  Operand rhs = InputOperandAt(instr, 1);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      if (instr->IsAdd()) {
        __ Add(dst, lhs, rhs);
      } else {
        __ Sub(dst, lhs, rhs);
      }
      break;

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add/sub type " << type;
      break;
    default:
      LOG(FATAL) << "Unimplemented add/sub type " << type;
  }
}

void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
  __ Ldr(OutputRegister(instruction),
         HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
}

void LocationsBuilderARM64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitCompare(HCompare* instruction) {
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  DCHECK_EQ(in_type, Primitive::kPrimLong);
  switch (in_type) {
    case Primitive::kPrimLong: {
      vixl::Label done;
      Register result = OutputRegister(instruction);
      Register left = InputRegisterAt(instruction, 0);
      Operand right = InputOperandAt(instruction, 1);
      __ Subs(result, left, right);
      __ B(eq, &done);
      __ Mov(result, 1);
      __ Cneg(result, result, le);
      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
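
// Trace of the long-compare lowering above (illustrative): Subs computes
// result = left - right and sets the flags; if the operands were equal,
// result is already 0 and we skip ahead. Otherwise result is seeded with 1
// and Cneg negates it when "le" holds; since equality was branched away,
// "le" here means left < right, so the sequence yields -1, 0 or +1 as
// HCompare requires.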

void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (instruction->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
  if (!instruction->NeedsMaterialization()) {
    return;
  }

  LocationSummary* locations = instruction->GetLocations();
  Register lhs = InputRegisterAt(instruction, 0);
  Operand rhs = InputOperandAt(instruction, 1);
  Register res = RegisterFrom(locations->Out(), instruction->GetType());
  Condition cond = ARM64Condition(instruction->GetCondition());

  __ Cmp(lhs, rhs);
  __ Csel(res, vixl::Assembler::AppropriateZeroRegFor(res), Operand(1), InvertCondition(cond));
}
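
// Materialization sketch: selecting with the *inverted* condition picks the
// zero register when the original condition is false and the constant 1 when
// it is true, i.e. res = cond ? 1 : 0, the classic "cset" idiom
// (illustrative; the macro-assembler chooses the concrete encoding).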

#define FOR_EACH_CONDITION_INSTRUCTION(M) \
  M(Equal)                                \
  M(NotEqual)                             \
  M(LessThan)                             \
  M(LessThanOrEqual)                      \
  M(GreaterThan)                          \
  M(GreaterThanOrEqual)
#define DEFINE_CONDITION_VISITORS(Name)                                          \
void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
#undef FOR_EACH_CONDITION_INSTRUCTION

void LocationsBuilderARM64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
    __ Brk(0);  // TODO: Introduce special markers for such code locations.
  }
}

void LocationsBuilderARM64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  // TODO: Support suspend check emission.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  if (cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
  vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());

  // TODO: Support constant condition input in VisitIf.

  if (condition->NeedsMaterialization()) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = if_instr->GetLocations()->InAt(0);
    DCHECK(cond_val.IsRegister());
    __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    Register lhs = InputRegisterAt(condition, 0);
    Operand rhs = InputOperandAt(condition, 1);
    Condition arm64_cond = ARM64Condition(condition->GetCondition());
    if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
      if (arm64_cond == eq) {
        __ Cbz(lhs, true_target);
      } else {
        __ Cbnz(lhs, true_target);
      }
    } else {
      __ Cmp(lhs, rhs);
      __ B(arm64_cond, true_target);
    }
  }

  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ B(false_target);
  }
}
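
// Branch-selection example (illustrative): an unmaterialized `x == 0` folds
// into a single `cbz w_x, true_target` and `x != 0` into `cbnz`; any other
// comparison falls back to `cmp` plus a conditional branch, e.g.
// `cmp w_x, #10; b.lt true_target`.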

void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  Primitive::Type res_type = instruction->GetType();
  Register res = OutputRegister(instruction);
  Register obj = InputRegisterAt(instruction, 0);
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (res_type) {
    case Primitive::kPrimBoolean: {
      __ Ldrb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimByte: {
      __ Ldrsb(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimShort: {
      __ Ldrsh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimChar: {
      __ Ldrh(res, MemOperand(obj, offset));
      break;
    }
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {  // TODO: support volatile.
      DCHECK(res.IsX() == (res_type == Primitive::kPrimLong));
      __ Ldr(res, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register res_type " << res_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable res_type " << res_type;
  }
}

void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  Register obj = InputRegisterAt(instruction, 0);
  Register value = InputRegisterAt(instruction, 1);
  Primitive::Type field_type = instruction->InputAt(1)->GetType();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ Strb(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ Strh(value, MemOperand(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong: {
      DCHECK(value.IsX() == (field_type == Primitive::kPrimLong));
      __ Str(value, MemOperand(obj, offset));

      if (field_type == Primitive::kPrimNot) {
        codegen_->MarkGCCard(obj, value);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(LocationFrom(x0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  Primitive::Type return_type = invoke->GetType();
  if (return_type != Primitive::kPrimVoid) {
    locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
  }
}

void InstructionCodeGeneratorARM64::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
  // Make sure that ArtMethod* is passed in W0 as per the calling convention.
  DCHECK(temp.Is(w0));
  size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
      invoke->GetIndexInDexCache() * kHeapRefSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  __ Ldr(temp, MemOperand(sp, kCurrentMethodStackOffset));
  // temp = temp->dex_cache_resolved_methods_;
  __ Ldr(temp, MemOperand(temp.X(),
                          mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache];
  __ Ldr(temp, MemOperand(temp.X(), index_in_cache));
  // lr = temp->entry_point_from_quick_compiled_code_;
  __ Ldr(lr, MemOperand(temp.X(),
                        mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));
  // lr();
  __ Blr(lr);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
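
// The dispatch chain above, spelled out (illustrative): reload the caller's
// ArtMethod* from sp[0], load its array of resolved methods, index that
// array with the callee's dex-cache slot, then load the callee's quick code
// entry point into lr and call it with blr. Each step is a plain dependent
// load, so the call site needs nothing beyond the dex-cache index.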

void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  Register temp = XRegisterFrom(invoke->GetLocations()->GetTemp(0));
  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
      invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  Offset class_offset = mirror::Object::ClassOffset();
  Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset();

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ Ldr(temp.W(), MemOperand(sp, receiver.GetStackIndex()));
    __ Ldr(temp.W(), MemOperand(temp, class_offset.SizeValue()));
  } else {
    DCHECK(receiver.IsRegister());
    __ Ldr(temp.W(), HeapOperandFrom(receiver, Primitive::kPrimNot, class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ Ldr(temp.W(), MemOperand(temp, method_offset));
  // lr = temp->GetEntryPoint();
  __ Ldr(lr, MemOperand(temp, entry_point.SizeValue()));
  // lr();
  __ Blr(lr);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}

void LocationsBuilderARM64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented mul type " << mul->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented mul type " << mul->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
  DCHECK(type_index.Is(w0));
  Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
  DCHECK(current_method.Is(w1));
  __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
  __ Mov(type_index, instruction->GetTypeIndex());
  __ Ldr(lr, MemOperand(
      tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocObjectWithAccessCheck).Int32Value()));
  __ Blr(lr);
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderARM64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
  switch (instruction->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
      break;

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  if (obj.IsRegister()) {
    __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ B(slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}

void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();

  if (return_type == Primitive::kPrimFloat || return_type == Primitive::kPrimDouble) {
    LOG(FATAL) << "Unimplemented return type " << return_type;
  }

  locations->SetInAt(0, LocationFrom(x0));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  if (kIsDebugBuild) {
    Primitive::Type type = instruction->InputAt(0)->GetType();
    switch (type) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK(InputRegisterAt(instruction, 0).Is(w0));
        break;

      case Primitive::kPrimLong:
        DCHECK(InputRegisterAt(instruction, 0).Is(x0));
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << type;
    }
  }
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Br(lr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleAddSub(instruction);
}

void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
  __ B(slow_path->GetEntryLabel(), hs);
}
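
// One unsigned branch covers both bounds (illustrative): after
// `cmp index, length`, "hs" (unsigned >=) is taken when index >= length and
// also when index is negative, because a negative index reinterpreted as
// unsigned exceeds any valid length.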

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // TODO: Improve support for suspend checks.
  SuspendCheckSlowPathARM64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, nullptr);
  codegen_->AddSlowPath(slow_path);

  __ Subs(wSuspend, wSuspend, 1);
  __ B(slow_path->GetEntryLabel(), le);
  __ Bind(slow_path->GetReturnLabel());
}
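
// Suspend-check sketch: the dedicated wSuspend register acts as a countdown.
// Each check decrements it and branches to the slow path once it reaches
// zero or below; the slow path calls pTestSuspend and resumes here. How the
// counter is reset is outside this snippet (assumed to be handled by the
// runtime).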

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

}  // namespace arm64
}  // namespace art