Alexandre Rames5319def2014-10-23 10:03:10 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm64.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
Andreas Gampe1cc7dba2014-12-17 18:43:01 -080020#include "entrypoints/quick/quick_entrypoints_enum.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010021#include "gc/accounting/card_table.h"
22#include "mirror/array-inl.h"
23#include "mirror/art_method.h"
24#include "mirror/class.h"
Calin Juravlecd6dffe2015-01-08 17:35:35 +000025#include "offsets.h"
Alexandre Rames5319def2014-10-23 10:03:10 +010026#include "thread.h"
27#include "utils/arm64/assembler_arm64.h"
28#include "utils/assembler.h"
29#include "utils/stack_checks.h"
30
31
32using namespace vixl; // NOLINT(build/namespaces)
33
34#ifdef __
35#error "ARM64 Codegen VIXL macro-assembler macro already defined."
36#endif
37
38
39namespace art {
40
41namespace arm64 {
42
Serban Constantinescu02d81cc2015-01-05 16:08:49 +000043// TODO: Tune the use of Load-Acquire, Store-Release vs Data Memory Barriers.
44// For now we prefer the use of load-acquire, store-release over explicit memory barriers.
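// Note: kUseAcquireRelease is consulted below (see GenerateClassInitializationCheck) to choose
// between an acquire load and a plain load followed by an explicit DMB read barrier.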
45static constexpr bool kUseAcquireRelease = true;
Alexandre Rames5319def2014-10-23 10:03:10 +010046static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
47static constexpr int kCurrentMethodStackOffset = 0;
48
49namespace {
Alexandre Ramesa89086e2014-11-07 17:13:25 +000050
51bool IsFPType(Primitive::Type type) {
52 return type == Primitive::kPrimFloat || type == Primitive::kPrimDouble;
53}
54
Alexandre Rames67555f72014-11-18 10:55:16 +000055bool IsIntegralType(Primitive::Type type) {
56 switch (type) {
57 case Primitive::kPrimByte:
58 case Primitive::kPrimChar:
59 case Primitive::kPrimShort:
60 case Primitive::kPrimInt:
61 case Primitive::kPrimLong:
62 return true;
63 default:
64 return false;
65 }
66}
67
Alexandre Ramesa89086e2014-11-07 17:13:25 +000068bool Is64BitType(Primitive::Type type) {
69 return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
70}
71
Alexandre Rames5319def2014-10-23 10:03:10 +010072// Convenience helpers to ease conversion to and from VIXL operands.
Alexandre Rames67555f72014-11-18 10:55:16 +000073static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
74 "Unexpected values for register codes.");
Alexandre Rames5319def2014-10-23 10:03:10 +010075
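// ART's register enums number SP/WSP as 31 and XZR/WZR as 32 (checked by the static_assert
// above), whereas VIXL encodes the zero register as 31 and gives SP a separate internal code.
// The two helpers below translate register codes between the two numbering schemes.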
76int VIXLRegCodeFromART(int code) {
Alexandre Rames5319def2014-10-23 10:03:10 +010077 if (code == SP) {
78 return vixl::kSPRegInternalCode;
79 }
80 if (code == XZR) {
81 return vixl::kZeroRegCode;
82 }
83 return code;
84}
85
86int ARTRegCodeFromVIXL(int code) {
Alexandre Rames5319def2014-10-23 10:03:10 +010087 if (code == vixl::kSPRegInternalCode) {
88 return SP;
89 }
90 if (code == vixl::kZeroRegCode) {
91 return XZR;
92 }
93 return code;
94}
95
96Register XRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +000097 DCHECK(location.IsRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +010098 return Register::XRegFromCode(VIXLRegCodeFromART(location.reg()));
99}
100
101Register WRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000102 DCHECK(location.IsRegister());
Alexandre Rames5319def2014-10-23 10:03:10 +0100103 return Register::WRegFromCode(VIXLRegCodeFromART(location.reg()));
104}
105
106Register RegisterFrom(Location location, Primitive::Type type) {
107 DCHECK(type != Primitive::kPrimVoid && !IsFPType(type));
108 return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location);
109}
110
111Register OutputRegister(HInstruction* instr) {
112 return RegisterFrom(instr->GetLocations()->Out(), instr->GetType());
113}
114
115Register InputRegisterAt(HInstruction* instr, int input_index) {
116 return RegisterFrom(instr->GetLocations()->InAt(input_index),
117 instr->InputAt(input_index)->GetType());
118}
119
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000120FPRegister DRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000121 DCHECK(location.IsFpuRegister());
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000122 return FPRegister::DRegFromCode(location.reg());
123}
124
125FPRegister SRegisterFrom(Location location) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000126 DCHECK(location.IsFpuRegister());
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000127 return FPRegister::SRegFromCode(location.reg());
128}
129
130FPRegister FPRegisterFrom(Location location, Primitive::Type type) {
131 DCHECK(IsFPType(type));
132 return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location);
133}
134
135FPRegister OutputFPRegister(HInstruction* instr) {
136 return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType());
137}
138
139FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) {
140 return FPRegisterFrom(instr->GetLocations()->InAt(input_index),
141 instr->InputAt(input_index)->GetType());
142}
143
Alexandre Rames3e69f162014-12-10 10:36:50 +0000144CPURegister CPURegisterFrom(Location location, Primitive::Type type) {
145 return IsFPType(type) ? CPURegister(FPRegisterFrom(location, type))
146 : CPURegister(RegisterFrom(location, type));
147}
148
Alexandre Rames67555f72014-11-18 10:55:16 +0000149CPURegister OutputCPURegister(HInstruction* instr) {
150 return IsFPType(instr->GetType()) ? static_cast<CPURegister>(OutputFPRegister(instr))
151 : static_cast<CPURegister>(OutputRegister(instr));
152}
153
154CPURegister InputCPURegisterAt(HInstruction* instr, int index) {
155 return IsFPType(instr->InputAt(index)->GetType())
156 ? static_cast<CPURegister>(InputFPRegisterAt(instr, index))
157 : static_cast<CPURegister>(InputRegisterAt(instr, index));
158}
159
Alexandre Rames5319def2014-10-23 10:03:10 +0100160int64_t Int64ConstantFrom(Location location) {
161 HConstant* instr = location.GetConstant();
162 return instr->IsIntConstant() ? instr->AsIntConstant()->GetValue()
163 : instr->AsLongConstant()->GetValue();
164}
165
166Operand OperandFrom(Location location, Primitive::Type type) {
167 if (location.IsRegister()) {
168 return Operand(RegisterFrom(location, type));
169 } else {
170 return Operand(Int64ConstantFrom(location));
171 }
172}
173
174Operand InputOperandAt(HInstruction* instr, int input_index) {
175 return OperandFrom(instr->GetLocations()->InAt(input_index),
176 instr->InputAt(input_index)->GetType());
177}
178
179MemOperand StackOperandFrom(Location location) {
180 return MemOperand(sp, location.GetStackIndex());
181}
182
Serban Constantinescu02164b32014-11-13 14:05:07 +0000183MemOperand HeapOperand(const Register& base, size_t offset = 0) {
Alexandre Rames5319def2014-10-23 10:03:10 +0100184 // A heap reference must be 32bit, so it fits in a W register.
185 DCHECK(base.IsW());
Alexandre Rames67555f72014-11-18 10:55:16 +0000186 return MemOperand(base.X(), offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100187}
188
Alexandre Rames67555f72014-11-18 10:55:16 +0000189MemOperand HeapOperand(const Register& base, Offset offset) {
190 return HeapOperand(base, offset.SizeValue());
191}
192
193MemOperand HeapOperandFrom(Location location, Offset offset) {
194 return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100195}
196
197Location LocationFrom(const Register& reg) {
198 return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.code()));
199}
200
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000201Location LocationFrom(const FPRegister& fpreg) {
202 return Location::FpuRegisterLocation(fpreg.code());
203}
204
Alexandre Rames5319def2014-10-23 10:03:10 +0100205} // namespace
206
207inline Condition ARM64Condition(IfCondition cond) {
208 switch (cond) {
209 case kCondEQ: return eq;
210 case kCondNE: return ne;
211 case kCondLT: return lt;
212 case kCondLE: return le;
213 case kCondGT: return gt;
214 case kCondGE: return ge;
215 default:
216 LOG(FATAL) << "Unknown if condition";
217 }
218 return nv; // Unreachable.
219}
220
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000221Location ARM64ReturnLocation(Primitive::Type return_type) {
222 DCHECK_NE(return_type, Primitive::kPrimVoid);
223 // Note that in practice, `LocationFrom(x0)` and `LocationFrom(w0)` create the
224 // same Location object, and so do `LocationFrom(d0)` and `LocationFrom(s0)`,
225 // but we use the exact registers for clarity.
226 if (return_type == Primitive::kPrimFloat) {
227 return LocationFrom(s0);
228 } else if (return_type == Primitive::kPrimDouble) {
229 return LocationFrom(d0);
230 } else if (return_type == Primitive::kPrimLong) {
231 return LocationFrom(x0);
232 } else {
233 return LocationFrom(w0);
234 }
235}
236
Alexandre Rames5319def2014-10-23 10:03:10 +0100237static const Register kRuntimeParameterCoreRegisters[] = { x0, x1, x2, x3, x4, x5, x6, x7 };
238static constexpr size_t kRuntimeParameterCoreRegistersLength =
239 arraysize(kRuntimeParameterCoreRegisters);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000240static const FPRegister kRuntimeParameterFpuRegisters[] = { d0, d1, d2, d3, d4, d5, d6, d7 };
241static constexpr size_t kRuntimeParameterFpuRegistersLength =
242 arraysize(kRuntimeParameterFpuRegisters);
Alexandre Rames5319def2014-10-23 10:03:10 +0100243
244class InvokeRuntimeCallingConvention : public CallingConvention<Register, FPRegister> {
245 public:
246 static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);
247
248 InvokeRuntimeCallingConvention()
249 : CallingConvention(kRuntimeParameterCoreRegisters,
250 kRuntimeParameterCoreRegistersLength,
251 kRuntimeParameterFpuRegisters,
252 kRuntimeParameterFpuRegistersLength) {}
253
254 Location GetReturnLocation(Primitive::Type return_type);
255
256 private:
257 DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
258};
259
260Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type return_type) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000261 return ARM64ReturnLocation(return_type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100262}
263
Alexandre Rames67555f72014-11-18 10:55:16 +0000264#define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->
265#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, x).Int32Value()
Alexandre Rames5319def2014-10-23 10:03:10 +0100266
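// Slow paths follow a common shape: the fast path branches to GetEntryLabel(), the out-of-line
// code sets up its arguments in the InvokeRuntimeCallingConvention registers, calls the runtime
// through InvokeRuntime(), and either throws or branches back via GetExitLabel().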
267class SlowPathCodeARM64 : public SlowPathCode {
268 public:
269 SlowPathCodeARM64() : entry_label_(), exit_label_() {}
270
271 vixl::Label* GetEntryLabel() { return &entry_label_; }
272 vixl::Label* GetExitLabel() { return &exit_label_; }
273
274 private:
275 vixl::Label entry_label_;
276 vixl::Label exit_label_;
277
278 DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM64);
279};
280
281class BoundsCheckSlowPathARM64 : public SlowPathCodeARM64 {
282 public:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000283 BoundsCheckSlowPathARM64(HBoundsCheck* instruction,
284 Location index_location,
285 Location length_location)
286 : instruction_(instruction),
287 index_location_(index_location),
288 length_location_(length_location) {}
289
Alexandre Rames5319def2014-10-23 10:03:10 +0100290
Alexandre Rames67555f72014-11-18 10:55:16 +0000291 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000292 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100293 __ Bind(GetEntryLabel());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000294 // We're moving two locations to locations that could overlap, so we need a parallel
295 // move resolver.
296 InvokeRuntimeCallingConvention calling_convention;
297 codegen->EmitParallelMoves(
298 index_location_, LocationFrom(calling_convention.GetRegisterAt(0)),
299 length_location_, LocationFrom(calling_convention.GetRegisterAt(1)));
300 arm64_codegen->InvokeRuntime(
301 QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800302 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100303 }
304
305 private:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000306 HBoundsCheck* const instruction_;
307 const Location index_location_;
308 const Location length_location_;
309
Alexandre Rames5319def2014-10-23 10:03:10 +0100310 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM64);
311};
312
Alexandre Rames67555f72014-11-18 10:55:16 +0000313class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
314 public:
315 explicit DivZeroCheckSlowPathARM64(HDivZeroCheck* instruction) : instruction_(instruction) {}
316
317 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
318 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
319 __ Bind(GetEntryLabel());
320 arm64_codegen->InvokeRuntime(
321 QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800322 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000323 }
324
325 private:
326 HDivZeroCheck* const instruction_;
327 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
328};
329
330class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
331 public:
332 LoadClassSlowPathARM64(HLoadClass* cls,
333 HInstruction* at,
334 uint32_t dex_pc,
335 bool do_clinit)
336 : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
337 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
338 }
339
340 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
341 LocationSummary* locations = at_->GetLocations();
342 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
343
344 __ Bind(GetEntryLabel());
345 codegen->SaveLiveRegisters(locations);
346
347 InvokeRuntimeCallingConvention calling_convention;
348 __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());
349 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
350 int32_t entry_point_offset = do_clinit_ ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
351 : QUICK_ENTRY_POINT(pInitializeType);
352 arm64_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800353 if (do_clinit_) {
354 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t, mirror::ArtMethod*>();
355 } else {
356 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t, mirror::ArtMethod*>();
357 }
Alexandre Rames67555f72014-11-18 10:55:16 +0000358
359 // Move the class to the desired location.
360 Location out = locations->Out();
361 if (out.IsValid()) {
362 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
363 Primitive::Type type = at_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000364 arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000365 }
366
367 codegen->RestoreLiveRegisters(locations);
368 __ B(GetExitLabel());
369 }
370
371 private:
372 // The class this slow path will load.
373 HLoadClass* const cls_;
374
375 // The instruction where this slow path is happening.
376 // (Might be the load class or an initialization check).
377 HInstruction* const at_;
378
379 // The dex PC of `at_`.
380 const uint32_t dex_pc_;
381
382 // Whether to initialize the class.
383 const bool do_clinit_;
384
385 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
386};
387
388class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
389 public:
390 explicit LoadStringSlowPathARM64(HLoadString* instruction) : instruction_(instruction) {}
391
392 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
393 LocationSummary* locations = instruction_->GetLocations();
394 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
395 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
396
397 __ Bind(GetEntryLabel());
398 codegen->SaveLiveRegisters(locations);
399
400 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800401 arm64_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1).W());
402 __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +0000403 arm64_codegen->InvokeRuntime(
404 QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800405 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000406 Primitive::Type type = instruction_->GetType();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000407 arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type);
Alexandre Rames67555f72014-11-18 10:55:16 +0000408
409 codegen->RestoreLiveRegisters(locations);
410 __ B(GetExitLabel());
411 }
412
413 private:
414 HLoadString* const instruction_;
415
416 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64);
417};
418
Alexandre Rames5319def2014-10-23 10:03:10 +0100419class NullCheckSlowPathARM64 : public SlowPathCodeARM64 {
420 public:
421 explicit NullCheckSlowPathARM64(HNullCheck* instr) : instruction_(instr) {}
422
Alexandre Rames67555f72014-11-18 10:55:16 +0000423 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
424 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100425 __ Bind(GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000426 arm64_codegen->InvokeRuntime(
427 QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800428 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
Alexandre Rames5319def2014-10-23 10:03:10 +0100429 }
430
431 private:
432 HNullCheck* const instruction_;
433
434 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM64);
435};
436
Serban Constantinescu02164b32014-11-13 14:05:07 +0000437class StackOverflowCheckSlowPathARM64 : public SlowPathCodeARM64 {
438 public:
439 StackOverflowCheckSlowPathARM64() {}
440
441 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
442 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
443 __ Bind(GetEntryLabel());
444 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pThrowStackOverflow), nullptr, 0);
Calin Juravle93edf732015-01-20 20:14:07 +0000445 CheckEntrypointTypes<kQuickThrowStackOverflow, void, void*>();
Serban Constantinescu02164b32014-11-13 14:05:07 +0000446 }
447
448 private:
449 DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM64);
450};
451
Alexandre Rames5319def2014-10-23 10:03:10 +0100452class SuspendCheckSlowPathARM64 : public SlowPathCodeARM64 {
453 public:
454 explicit SuspendCheckSlowPathARM64(HSuspendCheck* instruction,
455 HBasicBlock* successor)
456 : instruction_(instruction), successor_(successor) {}
457
Alexandre Rames67555f72014-11-18 10:55:16 +0000458 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
459 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
Alexandre Rames5319def2014-10-23 10:03:10 +0100460 __ Bind(GetEntryLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000461 codegen->SaveLiveRegisters(instruction_->GetLocations());
462 arm64_codegen->InvokeRuntime(
463 QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800464 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Alexandre Rames67555f72014-11-18 10:55:16 +0000465 codegen->RestoreLiveRegisters(instruction_->GetLocations());
466 if (successor_ == nullptr) {
467 __ B(GetReturnLabel());
468 } else {
469 __ B(arm64_codegen->GetLabelOf(successor_));
470 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100471 }
472
473 vixl::Label* GetReturnLabel() {
474 DCHECK(successor_ == nullptr);
475 return &return_label_;
476 }
477
Alexandre Rames5319def2014-10-23 10:03:10 +0100478 private:
479 HSuspendCheck* const instruction_;
480 // If not null, the block to branch to after the suspend check.
481 HBasicBlock* const successor_;
482
483 // If `successor_` is null, the label to branch to after the suspend check.
484 vixl::Label return_label_;
485
486 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM64);
487};
488
Alexandre Rames67555f72014-11-18 10:55:16 +0000489class TypeCheckSlowPathARM64 : public SlowPathCodeARM64 {
490 public:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000491 TypeCheckSlowPathARM64(HInstruction* instruction,
492 Location class_to_check,
493 Location object_class,
494 uint32_t dex_pc)
495 : instruction_(instruction),
496 class_to_check_(class_to_check),
497 object_class_(object_class),
498 dex_pc_(dex_pc) {}
Alexandre Rames67555f72014-11-18 10:55:16 +0000499
500 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000501 LocationSummary* locations = instruction_->GetLocations();
502 DCHECK(instruction_->IsCheckCast()
503 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
504 CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
505
Alexandre Rames67555f72014-11-18 10:55:16 +0000506 __ Bind(GetEntryLabel());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000507 codegen->SaveLiveRegisters(locations);
508
509 // We're moving two locations to locations that could overlap, so we need a parallel
510 // move resolver.
511 InvokeRuntimeCallingConvention calling_convention;
512 codegen->EmitParallelMoves(
513 class_to_check_, LocationFrom(calling_convention.GetRegisterAt(0)),
514 object_class_, LocationFrom(calling_convention.GetRegisterAt(1)));
515
516 if (instruction_->IsInstanceOf()) {
517 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
518 Primitive::Type ret_type = instruction_->GetType();
519 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
520 arm64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800521 CheckEntrypointTypes<kQuickInstanceofNonTrivial, uint32_t,
522 const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000523 } else {
524 DCHECK(instruction_->IsCheckCast());
525 arm64_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800526 CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
Alexandre Rames3e69f162014-12-10 10:36:50 +0000527 }
528
529 codegen->RestoreLiveRegisters(locations);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000530 __ B(GetExitLabel());
Alexandre Rames67555f72014-11-18 10:55:16 +0000531 }
532
533 private:
Alexandre Rames3e69f162014-12-10 10:36:50 +0000534 HInstruction* const instruction_;
535 const Location class_to_check_;
536 const Location object_class_;
537 uint32_t dex_pc_;
538
Alexandre Rames67555f72014-11-18 10:55:16 +0000539 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM64);
540};
541
Alexandre Rames5319def2014-10-23 10:03:10 +0100542#undef __
543
544Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
545 Location next_location;
546 if (type == Primitive::kPrimVoid) {
547 LOG(FATAL) << "Unreachable type " << type;
548 }
549
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000550 if (IsFPType(type) && (fp_index_ < calling_convention.GetNumberOfFpuRegisters())) {
551 next_location = LocationFrom(calling_convention.GetFpuRegisterAt(fp_index_++));
552 } else if (!IsFPType(type) && (gp_index_ < calling_convention.GetNumberOfRegisters())) {
553 next_location = LocationFrom(calling_convention.GetRegisterAt(gp_index_++));
554 } else {
555 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
556 next_location = Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
557 : Location::StackSlot(stack_offset);
Alexandre Rames5319def2014-10-23 10:03:10 +0100558 }
559
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000560 // Space on the stack is reserved for all arguments.
561 stack_index_ += Is64BitType(type) ? 2 : 1;
Alexandre Rames5319def2014-10-23 10:03:10 +0100562 return next_location;
563}
564
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000565CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph, const CompilerOptions& compiler_options)
Alexandre Rames5319def2014-10-23 10:03:10 +0100566 : CodeGenerator(graph,
567 kNumberOfAllocatableRegisters,
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000568 kNumberOfAllocatableFPRegisters,
Calin Juravlecd6dffe2015-01-08 17:35:35 +0000569 kNumberOfAllocatableRegisterPairs,
570 compiler_options),
Alexandre Rames5319def2014-10-23 10:03:10 +0100571 block_labels_(nullptr),
572 location_builder_(graph, this),
Alexandre Rames3e69f162014-12-10 10:36:50 +0000573 instruction_visitor_(graph, this),
574 move_resolver_(graph->GetArena(), this) {}
Alexandre Rames5319def2014-10-23 10:03:10 +0100575
Alexandre Rames67555f72014-11-18 10:55:16 +0000576#undef __
577#define __ GetVIXLAssembler()->
Alexandre Rames5319def2014-10-23 10:03:10 +0100578
Serban Constantinescu32f5b4d2014-11-25 20:05:46 +0000579void CodeGeneratorARM64::Finalize(CodeAllocator* allocator) {
580 // Ensure we emit the literal pool.
581 __ FinalizeCode();
582 CodeGenerator::Finalize(allocator);
583}
584
Alexandre Rames3e69f162014-12-10 10:36:50 +0000585void ParallelMoveResolverARM64::EmitMove(size_t index) {
586 MoveOperands* move = moves_.Get(index);
587 codegen_->MoveLocation(move->GetDestination(), move->GetSource());
588}
589
590void ParallelMoveResolverARM64::EmitSwap(size_t index) {
591 MoveOperands* move = moves_.Get(index);
592 codegen_->SwapLocations(move->GetDestination(), move->GetSource());
593}
594
595void ParallelMoveResolverARM64::RestoreScratch(int reg) {
596 __ Pop(Register(VIXLRegCodeFromART(reg), kXRegSize));
597}
598
599void ParallelMoveResolverARM64::SpillScratch(int reg) {
600 __ Push(Register(VIXLRegCodeFromART(reg), kXRegSize));
601}
602
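// Frame entry: optionally emit a stack overflow check, either an implicit probe load at
// sp - GetStackOverflowReservedBytes(kArm64) or an explicit comparison against the thread's
// stack end that branches to StackOverflowCheckSlowPathARM64. Then store the current method at
// sp[0] while pre-decrementing sp by the frame size, and spill the preserved core registers at
// the top of the frame.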
Alexandre Rames5319def2014-10-23 10:03:10 +0100603void CodeGeneratorARM64::GenerateFrameEntry() {
Serban Constantinescu02164b32014-11-13 14:05:07 +0000604 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kArm64) || !IsLeafMethod();
605 if (do_overflow_check) {
606 UseScratchRegisterScope temps(GetVIXLAssembler());
607 Register temp = temps.AcquireX();
Calin Juravle93edf732015-01-20 20:14:07 +0000608 if (GetCompilerOptions().GetImplicitStackOverflowChecks()) {
609 __ Add(temp, sp, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm64)));
610 __ Ldr(wzr, MemOperand(temp, 0));
611 RecordPcInfo(nullptr, 0);
612 } else {
Serban Constantinescu02164b32014-11-13 14:05:07 +0000613 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM64();
614 AddSlowPath(slow_path);
615
616 __ Ldr(temp, MemOperand(tr, Thread::StackEndOffset<kArm64WordSize>().Int32Value()));
617 __ Cmp(sp, temp);
618 __ B(lo, slow_path->GetEntryLabel());
Serban Constantinescu02164b32014-11-13 14:05:07 +0000619 }
620 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100621
622 CPURegList preserved_regs = GetFramePreservedRegisters();
623 int frame_size = GetFrameSize();
624 core_spill_mask_ |= preserved_regs.list();
625
626 __ Str(w0, MemOperand(sp, -frame_size, PreIndex));
627 __ PokeCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
628
629 // Stack layout:
630 // sp[frame_size - 8] : lr.
631 // ... : other preserved registers.
632 // sp[frame_size - regs_size]: first preserved register.
633 // ... : reserved frame space.
Alexandre Rames67555f72014-11-18 10:55:16 +0000634 // sp[0] : current method.
Alexandre Rames5319def2014-10-23 10:03:10 +0100635}
636
637void CodeGeneratorARM64::GenerateFrameExit() {
638 int frame_size = GetFrameSize();
639 CPURegList preserved_regs = GetFramePreservedRegisters();
640 __ PeekCPURegList(preserved_regs, frame_size - preserved_regs.TotalSizeInBytes());
641 __ Drop(frame_size);
642}
643
644void CodeGeneratorARM64::Bind(HBasicBlock* block) {
645 __ Bind(GetLabelOf(block));
646}
647
Alexandre Rames5319def2014-10-23 10:03:10 +0100648void CodeGeneratorARM64::Move(HInstruction* instruction,
649 Location location,
650 HInstruction* move_for) {
651 LocationSummary* locations = instruction->GetLocations();
652 if (locations != nullptr && locations->Out().Equals(location)) {
653 return;
654 }
655
656 Primitive::Type type = instruction->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000657 DCHECK_NE(type, Primitive::kPrimVoid);
Alexandre Rames5319def2014-10-23 10:03:10 +0100658
659 if (instruction->IsIntConstant() || instruction->IsLongConstant()) {
660 int64_t value = instruction->IsIntConstant() ? instruction->AsIntConstant()->GetValue()
661 : instruction->AsLongConstant()->GetValue();
662 if (location.IsRegister()) {
663 Register dst = RegisterFrom(location, type);
664 DCHECK((instruction->IsIntConstant() && dst.Is32Bits()) ||
665 (instruction->IsLongConstant() && dst.Is64Bits()));
666 __ Mov(dst, value);
667 } else {
668 DCHECK(location.IsStackSlot() || location.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +0000669 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +0100670 Register temp = instruction->IsIntConstant() ? temps.AcquireW() : temps.AcquireX();
671 __ Mov(temp, value);
672 __ Str(temp, StackOperandFrom(location));
673 }
Nicolas Geoffrayf43083d2014-11-07 10:48:10 +0000674 } else if (instruction->IsTemporary()) {
675 Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000676 MoveLocation(location, temp_location, type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100677 } else if (instruction->IsLoadLocal()) {
678 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000679 if (Is64BitType(type)) {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000680 MoveLocation(location, Location::DoubleStackSlot(stack_slot), type);
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000681 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000682 MoveLocation(location, Location::StackSlot(stack_slot), type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100683 }
684
685 } else {
686 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000687 MoveLocation(location, locations->Out(), type);
Alexandre Rames5319def2014-10-23 10:03:10 +0100688 }
689}
690
691size_t CodeGeneratorARM64::FrameEntrySpillSize() const {
692 return GetFramePreservedRegistersSize();
693}
694
695Location CodeGeneratorARM64::GetStackLocation(HLoadLocal* load) const {
696 Primitive::Type type = load->GetType();
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000697
Alexandre Rames5319def2014-10-23 10:03:10 +0100698 switch (type) {
699 case Primitive::kPrimNot:
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000700 case Primitive::kPrimInt:
701 case Primitive::kPrimFloat:
702 return Location::StackSlot(GetStackSlot(load->GetLocal()));
703
704 case Primitive::kPrimLong:
705 case Primitive::kPrimDouble:
706 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
707
Alexandre Rames5319def2014-10-23 10:03:10 +0100708 case Primitive::kPrimBoolean:
709 case Primitive::kPrimByte:
710 case Primitive::kPrimChar:
711 case Primitive::kPrimShort:
Alexandre Rames5319def2014-10-23 10:03:10 +0100712 case Primitive::kPrimVoid:
Alexandre Rames5319def2014-10-23 10:03:10 +0100713 LOG(FATAL) << "Unexpected type " << type;
714 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000715
Alexandre Rames5319def2014-10-23 10:03:10 +0100716 LOG(FATAL) << "Unreachable";
717 return Location::NoLocation();
718}
719
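// Write barrier: mark the card covering `object` after a reference `value` is stored into it.
// Null stores are skipped, the card table base is loaded from the thread register, the card
// index is object >> kCardShift, and the byte written is the low byte of the card-table base
// register itself.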
720void CodeGeneratorARM64::MarkGCCard(Register object, Register value) {
Alexandre Rames67555f72014-11-18 10:55:16 +0000721 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames5319def2014-10-23 10:03:10 +0100722 Register card = temps.AcquireX();
Serban Constantinescu02164b32014-11-13 14:05:07 +0000723 Register temp = temps.AcquireW(); // Index within the CardTable - 32bit.
Alexandre Rames5319def2014-10-23 10:03:10 +0100724 vixl::Label done;
725 __ Cbz(value, &done);
726 __ Ldr(card, MemOperand(tr, Thread::CardTableOffset<kArm64WordSize>().Int32Value()));
727 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
Serban Constantinescu02164b32014-11-13 14:05:07 +0000728 __ Strb(card, MemOperand(card, temp.X()));
Alexandre Rames5319def2014-10-23 10:03:10 +0100729 __ Bind(&done);
730}
731
732void CodeGeneratorARM64::SetupBlockedRegisters() const {
733 // Block reserved registers:
734 // ip0 (VIXL temporary)
735 // ip1 (VIXL temporary)
Serban Constantinescu02164b32014-11-13 14:05:07 +0000736 // tr
Alexandre Rames5319def2014-10-23 10:03:10 +0100737 // lr
738 // sp is not part of the allocatable registers, so we don't need to block it.
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +0000739 // TODO: Avoid blocking callee-saved registers, and instead preserve them
740 // where necessary.
Alexandre Rames5319def2014-10-23 10:03:10 +0100741 CPURegList reserved_core_registers = vixl_reserved_core_registers;
742 reserved_core_registers.Combine(runtime_reserved_core_registers);
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +0000743 reserved_core_registers.Combine(quick_callee_saved_registers);
Alexandre Rames5319def2014-10-23 10:03:10 +0100744 while (!reserved_core_registers.IsEmpty()) {
745 blocked_core_registers_[reserved_core_registers.PopLowestIndex().code()] = true;
746 }
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000747 CPURegList reserved_fp_registers = vixl_reserved_fp_registers;
748 reserved_fp_registers.Combine(CPURegList::GetCalleeSavedFP());
749 while (!reserved_fp_registers.IsEmpty()) {
750 blocked_fpu_registers_[reserved_fp_registers.PopLowestIndex().code()] = true;
751 }
Alexandre Rames5319def2014-10-23 10:03:10 +0100752}
753
754Location CodeGeneratorARM64::AllocateFreeRegister(Primitive::Type type) const {
755 if (type == Primitive::kPrimVoid) {
756 LOG(FATAL) << "Unreachable type " << type;
757 }
758
Alexandre Rames5319def2014-10-23 10:03:10 +0100759 if (IsFPType(type)) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000760 ssize_t reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfAllocatableFPRegisters);
761 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100762 return Location::FpuRegisterLocation(reg);
763 } else {
Alexandre Ramesa89086e2014-11-07 17:13:25 +0000764 ssize_t reg = FindFreeEntry(blocked_core_registers_, kNumberOfAllocatableRegisters);
765 DCHECK_NE(reg, -1);
Alexandre Rames5319def2014-10-23 10:03:10 +0100766 return Location::RegisterLocation(reg);
767 }
768}
769
Alexandre Rames3e69f162014-12-10 10:36:50 +0000770size_t CodeGeneratorARM64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
771 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
772 __ Str(reg, MemOperand(sp, stack_index));
773 return kArm64WordSize;
774}
775
776size_t CodeGeneratorARM64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
777 Register reg = Register(VIXLRegCodeFromART(reg_id), kXRegSize);
778 __ Ldr(reg, MemOperand(sp, stack_index));
779 return kArm64WordSize;
780}
781
782size_t CodeGeneratorARM64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
783 FPRegister reg = FPRegister(reg_id, kDRegSize);
784 __ Str(reg, MemOperand(sp, stack_index));
785 return kArm64WordSize;
786}
787
788size_t CodeGeneratorARM64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
789 FPRegister reg = FPRegister(reg_id, kDRegSize);
790 __ Ldr(reg, MemOperand(sp, stack_index));
791 return kArm64WordSize;
792}
793
Alexandre Rames5319def2014-10-23 10:03:10 +0100794void CodeGeneratorARM64::DumpCoreRegister(std::ostream& stream, int reg) const {
795 stream << Arm64ManagedRegister::FromXRegister(XRegister(reg));
796}
797
798void CodeGeneratorARM64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
799 stream << Arm64ManagedRegister::FromDRegister(DRegister(reg));
800}
801
Alexandre Rames67555f72014-11-18 10:55:16 +0000802void CodeGeneratorARM64::MoveConstant(CPURegister destination, HConstant* constant) {
803 if (constant->IsIntConstant() || constant->IsLongConstant()) {
804 __ Mov(Register(destination),
805 constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
806 : constant->AsLongConstant()->GetValue());
807 } else if (constant->IsFloatConstant()) {
808 __ Fmov(FPRegister(destination), constant->AsFloatConstant()->GetValue());
809 } else {
810 DCHECK(constant->IsDoubleConstant());
811 __ Fmov(FPRegister(destination), constant->AsDoubleConstant()->GetValue());
812 }
813}
814
Alexandre Rames3e69f162014-12-10 10:36:50 +0000815
816static bool CoherentConstantAndType(Location constant, Primitive::Type type) {
817 DCHECK(constant.IsConstant());
818 HConstant* cst = constant.GetConstant();
819 return (cst->IsIntConstant() && type == Primitive::kPrimInt) ||
820 (cst->IsLongConstant() && type == Primitive::kPrimLong) ||
821 (cst->IsFloatConstant() && type == Primitive::kPrimFloat) ||
822 (cst->IsDoubleConstant() && type == Primitive::kPrimDouble);
823}
824
825void CodeGeneratorARM64::MoveLocation(Location destination, Location source, Primitive::Type type) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000826 if (source.Equals(destination)) {
827 return;
828 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000829
830 // A valid move can always be inferred from the destination and source
831 // locations. When moving from and to a register, the argument type can be
832 // used to generate 32bit instead of 64bit moves. In debug mode we also
833 // checks the coherency of the locations and the type.
834 bool unspecified_type = (type == Primitive::kPrimVoid);
835
836 if (destination.IsRegister() || destination.IsFpuRegister()) {
837 if (unspecified_type) {
838 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
839 if (source.IsStackSlot() ||
840 (src_cst != nullptr && (src_cst->IsIntConstant() || src_cst->IsFloatConstant()))) {
841 // For stack slots and 32bit constants, a 64bit type is appropriate.
842 type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
Alexandre Rames67555f72014-11-18 10:55:16 +0000843 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +0000844 // If the source is a double stack slot or a 64bit constant, a 64bit
845 // type is appropriate. Else the source is a register, and since the
846 // type has not been specified, we choose a 64bit type to force a 64bit
847 // move.
848 type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
Alexandre Rames67555f72014-11-18 10:55:16 +0000849 }
Alexandre Rames3e69f162014-12-10 10:36:50 +0000850 }
851 DCHECK((destination.IsFpuRegister() && IsFPType(type)) ||
852 (destination.IsRegister() && !IsFPType(type)));
853 CPURegister dst = CPURegisterFrom(destination, type);
854 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
855 DCHECK(dst.Is64Bits() == source.IsDoubleStackSlot());
856 __ Ldr(dst, StackOperandFrom(source));
857 } else if (source.IsConstant()) {
858 DCHECK(CoherentConstantAndType(source, type));
859 MoveConstant(dst, source.GetConstant());
860 } else {
861 if (destination.IsRegister()) {
862 __ Mov(Register(dst), RegisterFrom(source, type));
863 } else {
864 __ Fmov(FPRegister(dst), FPRegisterFrom(source, type));
865 }
866 }
867
868 } else { // The destination is not a register. It must be a stack slot.
869 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
870 if (source.IsRegister() || source.IsFpuRegister()) {
871 if (unspecified_type) {
872 if (source.IsRegister()) {
873 type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
874 } else {
875 type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
876 }
877 }
878 DCHECK((destination.IsDoubleStackSlot() == Is64BitType(type)) &&
879 (source.IsFpuRegister() == IsFPType(type)));
880 __ Str(CPURegisterFrom(source, type), StackOperandFrom(destination));
881 } else if (source.IsConstant()) {
882 DCHECK(unspecified_type || CoherentConstantAndType(source, type));
883 UseScratchRegisterScope temps(GetVIXLAssembler());
884 HConstant* src_cst = source.GetConstant();
885 CPURegister temp;
886 if (src_cst->IsIntConstant()) {
887 temp = temps.AcquireW();
888 } else if (src_cst->IsLongConstant()) {
889 temp = temps.AcquireX();
890 } else if (src_cst->IsFloatConstant()) {
891 temp = temps.AcquireS();
892 } else {
893 DCHECK(src_cst->IsDoubleConstant());
894 temp = temps.AcquireD();
895 }
896 MoveConstant(temp, src_cst);
Alexandre Rames67555f72014-11-18 10:55:16 +0000897 __ Str(temp, StackOperandFrom(destination));
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000898 } else {
Alexandre Rames67555f72014-11-18 10:55:16 +0000899 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000900 DCHECK(source.IsDoubleStackSlot() == destination.IsDoubleStackSlot());
Alexandre Rames67555f72014-11-18 10:55:16 +0000901 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Rames3e69f162014-12-10 10:36:50 +0000902 // There is generally less pressure on FP registers.
903 FPRegister temp = destination.IsDoubleStackSlot() ? temps.AcquireD() : temps.AcquireS();
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000904 __ Ldr(temp, StackOperandFrom(source));
905 __ Str(temp, StackOperandFrom(destination));
906 }
907 }
908}
909
Alexandre Rames3e69f162014-12-10 10:36:50 +0000910void CodeGeneratorARM64::SwapLocations(Location loc1, Location loc2) {
911 DCHECK(!loc1.IsConstant());
912 DCHECK(!loc2.IsConstant());
913
914 if (loc1.Equals(loc2)) {
915 return;
916 }
917
918 UseScratchRegisterScope temps(GetAssembler()->vixl_masm_);
919
920 bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
921 bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
922 bool is_fp_reg1 = loc1.IsFpuRegister();
923 bool is_fp_reg2 = loc2.IsFpuRegister();
924
925 if (loc2.IsRegister() && loc1.IsRegister()) {
926 Register r1 = XRegisterFrom(loc1);
927 Register r2 = XRegisterFrom(loc2);
928 Register tmp = temps.AcquireSameSizeAs(r1);
929 __ Mov(tmp, r2);
930 __ Mov(r2, r1);
931 __ Mov(r1, tmp);
932 } else if (is_fp_reg2 && is_fp_reg1) {
933 FPRegister r1 = DRegisterFrom(loc1);
934 FPRegister r2 = DRegisterFrom(loc2);
935 FPRegister tmp = temps.AcquireSameSizeAs(r1);
936 __ Fmov(tmp, r2);
937 __ Fmov(r2, r1);
938 __ Fmov(r1, tmp);
939 } else if (is_slot1 != is_slot2) {
940 MemOperand mem = StackOperandFrom(is_slot1 ? loc1 : loc2);
941 Location reg_loc = is_slot1 ? loc2 : loc1;
942 CPURegister reg, tmp;
943 if (reg_loc.IsFpuRegister()) {
944 reg = DRegisterFrom(reg_loc);
945 tmp = temps.AcquireD();
946 } else {
947 reg = XRegisterFrom(reg_loc);
948 tmp = temps.AcquireX();
949 }
950 __ Ldr(tmp, mem);
951 __ Str(reg, mem);
952 if (reg_loc.IsFpuRegister()) {
953 __ Fmov(FPRegister(reg), FPRegister(tmp));
954 } else {
955 __ Mov(Register(reg), Register(tmp));
956 }
957 } else if (is_slot1 && is_slot2) {
958 MemOperand mem1 = StackOperandFrom(loc1);
959 MemOperand mem2 = StackOperandFrom(loc2);
960 Register tmp1 = loc1.IsStackSlot() ? temps.AcquireW() : temps.AcquireX();
961 Register tmp2 = temps.AcquireSameSizeAs(tmp1);
962 __ Ldr(tmp1, mem1);
963 __ Ldr(tmp2, mem2);
964 __ Str(tmp1, mem2);
965 __ Str(tmp2, mem1);
966 } else {
967 LOG(FATAL) << "Unimplemented";
968 }
969}
970
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000971void CodeGeneratorARM64::Load(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000972 CPURegister dst,
973 const MemOperand& src) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000974 switch (type) {
975 case Primitive::kPrimBoolean:
Alexandre Rames67555f72014-11-18 10:55:16 +0000976 __ Ldrb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000977 break;
978 case Primitive::kPrimByte:
Alexandre Rames67555f72014-11-18 10:55:16 +0000979 __ Ldrsb(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000980 break;
981 case Primitive::kPrimShort:
Alexandre Rames67555f72014-11-18 10:55:16 +0000982 __ Ldrsh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000983 break;
984 case Primitive::kPrimChar:
Alexandre Rames67555f72014-11-18 10:55:16 +0000985 __ Ldrh(Register(dst), src);
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000986 break;
987 case Primitive::kPrimInt:
988 case Primitive::kPrimNot:
989 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000990 case Primitive::kPrimFloat:
991 case Primitive::kPrimDouble:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +0000992 DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
Alexandre Rames67555f72014-11-18 10:55:16 +0000993 __ Ldr(dst, src);
994 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +0000995 case Primitive::kPrimVoid:
996 LOG(FATAL) << "Unreachable type " << type;
997 }
998}
999
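// Acquire load. Ldar/Ldarb/Ldarh accept only a plain base register, so the effective address is
// first materialized into a scratch register. There is no sign-extending acquire load, so byte
// and short values are sign-extended with Sbfx afterwards, and FP values are loaded through a
// core scratch register and transferred with Fmov.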
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001000void CodeGeneratorARM64::LoadAcquire(Primitive::Type type,
1001 CPURegister dst,
1002 const MemOperand& src) {
1003 UseScratchRegisterScope temps(GetVIXLAssembler());
1004 Register temp_base = temps.AcquireX();
1005
1006 DCHECK(!src.IsRegisterOffset());
1007 DCHECK(!src.IsPreIndex());
1008 DCHECK(!src.IsPostIndex());
1009
1010 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1011 __ Add(temp_base, src.base(), src.offset());
1012 MemOperand base = MemOperand(temp_base);
1013 switch (type) {
1014 case Primitive::kPrimBoolean:
1015 __ Ldarb(Register(dst), base);
1016 break;
1017 case Primitive::kPrimByte:
1018 __ Ldarb(Register(dst), base);
1019 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1020 break;
1021 case Primitive::kPrimChar:
1022 __ Ldarh(Register(dst), base);
1023 break;
1024 case Primitive::kPrimShort:
1025 __ Ldarh(Register(dst), base);
1026 __ Sbfx(Register(dst), Register(dst), 0, Primitive::ComponentSize(type) * kBitsPerByte);
1027 break;
1028 case Primitive::kPrimInt:
1029 case Primitive::kPrimNot:
1030 case Primitive::kPrimLong:
1031 DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
1032 __ Ldar(Register(dst), base);
1033 break;
1034 case Primitive::kPrimFloat:
1035 case Primitive::kPrimDouble: {
1036 DCHECK(dst.IsFPRegister());
1037 DCHECK_EQ(dst.Is64Bits(), Is64BitType(type));
1038
1039 Register temp = dst.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1040 __ Ldar(temp, base);
1041 __ Fmov(FPRegister(dst), temp);
1042 break;
1043 }
1044 case Primitive::kPrimVoid:
1045 LOG(FATAL) << "Unreachable type " << type;
1046 }
1047}
1048
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001049void CodeGeneratorARM64::Store(Primitive::Type type,
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001050 CPURegister src,
1051 const MemOperand& dst) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001052 switch (type) {
1053 case Primitive::kPrimBoolean:
1054 case Primitive::kPrimByte:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001055 __ Strb(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001056 break;
1057 case Primitive::kPrimChar:
1058 case Primitive::kPrimShort:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001059 __ Strh(Register(src), dst);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001060 break;
1061 case Primitive::kPrimInt:
1062 case Primitive::kPrimNot:
1063 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001064 case Primitive::kPrimFloat:
1065 case Primitive::kPrimDouble:
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001066 DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
1067 __ Str(src, dst);
Alexandre Rames67555f72014-11-18 10:55:16 +00001068 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001069 case Primitive::kPrimVoid:
1070 LOG(FATAL) << "Unreachable type " << type;
1071 }
1072}
1073
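// Release store, mirroring LoadAcquire: the effective address is computed into a scratch
// register because Stlr/Stlrb/Stlrh take only a base register, and FP sources are moved to a
// core scratch register with Fmov before the Stlr.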
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001074void CodeGeneratorARM64::StoreRelease(Primitive::Type type,
1075 CPURegister src,
1076 const MemOperand& dst) {
1077 UseScratchRegisterScope temps(GetVIXLAssembler());
1078 Register temp_base = temps.AcquireX();
1079
1080 DCHECK(!dst.IsRegisterOffset());
1081 DCHECK(!dst.IsPreIndex());
1082 DCHECK(!dst.IsPostIndex());
1083
1084 // TODO(vixl): Let the MacroAssembler handle this.
1085 __ Add(temp_base, dst.base(), dst.offset());
1086 MemOperand base = MemOperand(temp_base);
1087 switch (type) {
1088 case Primitive::kPrimBoolean:
1089 case Primitive::kPrimByte:
1090 __ Stlrb(Register(src), base);
1091 break;
1092 case Primitive::kPrimChar:
1093 case Primitive::kPrimShort:
1094 __ Stlrh(Register(src), base);
1095 break;
1096 case Primitive::kPrimInt:
1097 case Primitive::kPrimNot:
1098 case Primitive::kPrimLong:
1099 DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
1100 __ Stlr(Register(src), base);
1101 break;
1102 case Primitive::kPrimFloat:
1103 case Primitive::kPrimDouble: {
1104 DCHECK(src.IsFPRegister());
1105 DCHECK_EQ(src.Is64Bits(), Is64BitType(type));
1106
1107 Register temp = src.Is64Bits() ? temps.AcquireX() : temps.AcquireW();
1108 __ Fmov(temp, FPRegister(src));
1109 __ Stlr(temp, base);
1110 break;
1111 }
1112 case Primitive::kPrimVoid:
1113 LOG(FATAL) << "Unreachable type " << type;
1114 }
1115}
1116
Alexandre Rames67555f72014-11-18 10:55:16 +00001117void CodeGeneratorARM64::LoadCurrentMethod(vixl::Register current_method) {
1118 DCHECK(current_method.IsW());
1119 __ Ldr(current_method, MemOperand(sp, kCurrentMethodStackOffset));
1120}
1121
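// Calls a Quick runtime entry point: the code pointer is loaded into lr from the thread
// register at `entry_point_offset` and called with Blr; a PC entry is recorded unless
// `instruction` is null (as in StackOverflowCheckSlowPathARM64).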
1122void CodeGeneratorARM64::InvokeRuntime(int32_t entry_point_offset,
1123 HInstruction* instruction,
1124 uint32_t dex_pc) {
1125 __ Ldr(lr, MemOperand(tr, entry_point_offset));
1126 __ Blr(lr);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001127 if (instruction != nullptr) {
1128 RecordPcInfo(instruction, dex_pc);
1129 DCHECK(instruction->IsSuspendCheck()
1130 || instruction->IsBoundsCheck()
1131 || instruction->IsNullCheck()
1132 || instruction->IsDivZeroCheck()
1133 || !IsLeafMethod());
1134 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001135}
1136
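// Class initialization check: load the class status (with acquire semantics when
// kUseAcquireRelease is set, otherwise a plain load followed by a read barrier) and branch to
// the slow path if it is below kStatusInitialized. With acquire/release the emitted sequence is
// roughly:
//   add  temp, class_reg, #status_offset
//   ldar temp, [temp]
//   cmp  temp, #kStatusInitialized
//   b.lt slow_path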
1137void InstructionCodeGeneratorARM64::GenerateClassInitializationCheck(SlowPathCodeARM64* slow_path,
1138 vixl::Register class_reg) {
1139 UseScratchRegisterScope temps(GetVIXLAssembler());
1140 Register temp = temps.AcquireW();
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001141 size_t status_offset = mirror::Class::StatusOffset().SizeValue();
1142
Serban Constantinescu02164b32014-11-13 14:05:07 +00001143 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001144 if (kUseAcquireRelease) {
1145 // TODO(vixl): Let the MacroAssembler handle MemOperand.
1146 __ Add(temp, class_reg, status_offset);
1147 __ Ldar(temp, HeapOperand(temp));
1148 __ Cmp(temp, mirror::Class::kStatusInitialized);
1149 __ B(lt, slow_path->GetEntryLabel());
1150 } else {
1151 __ Ldr(temp, HeapOperand(class_reg, status_offset));
1152 __ Cmp(temp, mirror::Class::kStatusInitialized);
1153 __ B(lt, slow_path->GetEntryLabel());
1154 __ Dmb(InnerShareable, BarrierReads);
1155 }
Alexandre Rames67555f72014-11-18 10:55:16 +00001156 __ Bind(slow_path->GetExitLabel());
1157}
Alexandre Rames5319def2014-10-23 10:03:10 +01001158
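// Map ART memory barrier kinds onto DMB flavours: kAnyAny and kAnyStore use a full barrier,
// kLoadAny a read barrier, kStoreStore a write barrier, all inner shareable.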
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001159void InstructionCodeGeneratorARM64::GenerateMemoryBarrier(MemBarrierKind kind) {
1160 BarrierType type = BarrierAll;
1161
1162 switch (kind) {
1163 case MemBarrierKind::kAnyAny:
1164 case MemBarrierKind::kAnyStore: {
1165 type = BarrierAll;
1166 break;
1167 }
1168 case MemBarrierKind::kLoadAny: {
1169 type = BarrierReads;
1170 break;
1171 }
1172 case MemBarrierKind::kStoreStore: {
1173 type = BarrierWrites;
1174 break;
1175 }
1176 default:
1177 LOG(FATAL) << "Unexpected memory barrier " << kind;
1178 }
1179 __ Dmb(InnerShareable, type);
1180}
1181
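// Suspend check: read the 16-bit thread flags from the thread register and branch to the slow
// path (which calls pTestSuspend) when any flag is set. When a successor block is given, the
// check instead branches to the successor when no flags are set and otherwise falls into the
// branch to the slow path.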
Serban Constantinescu02164b32014-11-13 14:05:07 +00001182void InstructionCodeGeneratorARM64::GenerateSuspendCheck(HSuspendCheck* instruction,
1183 HBasicBlock* successor) {
1184 SuspendCheckSlowPathARM64* slow_path =
1185 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM64(instruction, successor);
1186 codegen_->AddSlowPath(slow_path);
1187 UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
1188 Register temp = temps.AcquireW();
1189
1190 __ Ldrh(temp, MemOperand(tr, Thread::ThreadFlagsOffset<kArm64WordSize>().SizeValue()));
1191 if (successor == nullptr) {
1192 __ Cbnz(temp, slow_path->GetEntryLabel());
1193 __ Bind(slow_path->GetReturnLabel());
1194 } else {
1195 __ Cbz(temp, codegen_->GetLabelOf(successor));
1196 __ B(slow_path->GetEntryLabel());
1197 // slow_path will return to GetLabelOf(successor).
1198 }
1199}
1200
Alexandre Rames5319def2014-10-23 10:03:10 +01001201InstructionCodeGeneratorARM64::InstructionCodeGeneratorARM64(HGraph* graph,
1202 CodeGeneratorARM64* codegen)
1203 : HGraphVisitor(graph),
1204 assembler_(codegen->GetAssembler()),
1205 codegen_(codegen) {}
1206
1207#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
Alexandre Rames3e69f162014-12-10 10:36:50 +00001208 /* No unimplemented IR. */
Alexandre Rames5319def2014-10-23 10:03:10 +01001209
1210#define UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name) name##UnimplementedInstructionBreakCode
1211
1212enum UnimplementedInstructionBreakCode {
Alexandre Rames67555f72014-11-18 10:55:16 +00001213 // Using a base helps identify when we hit such breakpoints.
1214 UnimplementedInstructionBreakCodeBaseCode = 0x900,
Alexandre Rames5319def2014-10-23 10:03:10 +01001215#define ENUM_UNIMPLEMENTED_INSTRUCTION(name) UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name),
1216 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(ENUM_UNIMPLEMENTED_INSTRUCTION)
1217#undef ENUM_UNIMPLEMENTED_INSTRUCTION
1218};
1219
1220#define DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS(name) \
1221 void InstructionCodeGeneratorARM64::Visit##name(H##name* instr) { \
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001222 UNUSED(instr); \
Alexandre Rames5319def2014-10-23 10:03:10 +01001223 __ Brk(UNIMPLEMENTED_INSTRUCTION_BREAK_CODE(name)); \
1224 } \
1225 void LocationsBuilderARM64::Visit##name(H##name* instr) { \
1226 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr); \
1227 locations->SetOut(Location::Any()); \
1228 }
1229 FOR_EACH_UNIMPLEMENTED_INSTRUCTION(DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS)
1230#undef DEFINE_UNIMPLEMENTED_INSTRUCTION_VISITORS
1231
1232#undef UNIMPLEMENTED_INSTRUCTION_BREAK_CODE
Alexandre Rames67555f72014-11-18 10:55:16 +00001233#undef FOR_EACH_UNIMPLEMENTED_INSTRUCTION
Alexandre Rames5319def2014-10-23 10:03:10 +01001234
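// Shared handling for Add/Sub/And/Or/Xor: integral operands accept a register-or-constant
// right-hand side, while float/double use FP registers and only Add and Sub are generated here.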
Alexandre Rames67555f72014-11-18 10:55:16 +00001235void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001236 DCHECK_EQ(instr->InputCount(), 2U);
1237 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1238 Primitive::Type type = instr->GetResultType();
1239 switch (type) {
1240 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001241 case Primitive::kPrimLong:
Alexandre Rames5319def2014-10-23 10:03:10 +01001242 locations->SetInAt(0, Location::RequiresRegister());
1243 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001244 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001245 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001246
1247 case Primitive::kPrimFloat:
1248 case Primitive::kPrimDouble:
1249 locations->SetInAt(0, Location::RequiresFpuRegister());
1250 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001251 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001252 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001253
Alexandre Rames5319def2014-10-23 10:03:10 +01001254 default:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001255 LOG(FATAL) << "Unexpected " << instr->DebugName() << " type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001256 }
1257}
1258
Alexandre Rames67555f72014-11-18 10:55:16 +00001259void InstructionCodeGeneratorARM64::HandleBinaryOp(HBinaryOperation* instr) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001260 Primitive::Type type = instr->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001261
1262 switch (type) {
1263 case Primitive::kPrimInt:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001264 case Primitive::kPrimLong: {
1265 Register dst = OutputRegister(instr);
1266 Register lhs = InputRegisterAt(instr, 0);
1267 Operand rhs = InputOperandAt(instr, 1);
Alexandre Rames5319def2014-10-23 10:03:10 +01001268 if (instr->IsAdd()) {
1269 __ Add(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001270 } else if (instr->IsAnd()) {
1271 __ And(dst, lhs, rhs);
1272 } else if (instr->IsOr()) {
1273 __ Orr(dst, lhs, rhs);
1274 } else if (instr->IsSub()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001275 __ Sub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001276 } else {
1277 DCHECK(instr->IsXor());
1278 __ Eor(dst, lhs, rhs);
Alexandre Rames5319def2014-10-23 10:03:10 +01001279 }
1280 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001281 }
1282 case Primitive::kPrimFloat:
1283 case Primitive::kPrimDouble: {
1284 FPRegister dst = OutputFPRegister(instr);
1285 FPRegister lhs = InputFPRegisterAt(instr, 0);
1286 FPRegister rhs = InputFPRegisterAt(instr, 1);
1287 if (instr->IsAdd()) {
1288 __ Fadd(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001289 } else if (instr->IsSub()) {
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001290 __ Fsub(dst, lhs, rhs);
Alexandre Rames67555f72014-11-18 10:55:16 +00001291 } else {
1292 LOG(FATAL) << "Unexpected floating-point binary operation";
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001293 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001294 break;
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001295 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001296 default:
Alexandre Rames67555f72014-11-18 10:55:16 +00001297 LOG(FATAL) << "Unexpected binary operation type " << type;
Alexandre Rames5319def2014-10-23 10:03:10 +01001298 }
1299}
1300
Serban Constantinescu02164b32014-11-13 14:05:07 +00001301void LocationsBuilderARM64::HandleShift(HBinaryOperation* instr) {
1302 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1303
1304 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1305 Primitive::Type type = instr->GetResultType();
1306 switch (type) {
1307 case Primitive::kPrimInt:
1308 case Primitive::kPrimLong: {
1309 locations->SetInAt(0, Location::RequiresRegister());
1310 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
1311 locations->SetOut(Location::RequiresRegister());
1312 break;
1313 }
1314 default:
1315 LOG(FATAL) << "Unexpected shift type " << type;
1316 }
1317}
1318
1319void InstructionCodeGeneratorARM64::HandleShift(HBinaryOperation* instr) {
1320 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr());
1321
1322 Primitive::Type type = instr->GetType();
1323 switch (type) {
1324 case Primitive::kPrimInt:
1325 case Primitive::kPrimLong: {
1326 Register dst = OutputRegister(instr);
1327 Register lhs = InputRegisterAt(instr, 0);
1328 Operand rhs = InputOperandAt(instr, 1);
1329 if (rhs.IsImmediate()) {
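        // The immediate shift amount is masked to the operand width (31 for int, 63 for long), matching Java shift semantics.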
1330 uint32_t shift_value = (type == Primitive::kPrimInt)
1331 ? static_cast<uint32_t>(rhs.immediate() & kMaxIntShiftValue)
1332 : static_cast<uint32_t>(rhs.immediate() & kMaxLongShiftValue);
1333 if (instr->IsShl()) {
1334 __ Lsl(dst, lhs, shift_value);
1335 } else if (instr->IsShr()) {
1336 __ Asr(dst, lhs, shift_value);
1337 } else {
1338 __ Lsr(dst, lhs, shift_value);
1339 }
1340 } else {
1341 Register rhs_reg = dst.IsX() ? rhs.reg().X() : rhs.reg().W();
1342
1343 if (instr->IsShl()) {
1344 __ Lsl(dst, lhs, rhs_reg);
1345 } else if (instr->IsShr()) {
1346 __ Asr(dst, lhs, rhs_reg);
1347 } else {
1348 __ Lsr(dst, lhs, rhs_reg);
1349 }
1350 }
1351 break;
1352 }
1353 default:
1354 LOG(FATAL) << "Unexpected shift operation type " << type;
1355 }
1356}
1357
Alexandre Rames5319def2014-10-23 10:03:10 +01001358void LocationsBuilderARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001359 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001360}
1361
1362void InstructionCodeGeneratorARM64::VisitAdd(HAdd* instruction) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001363 HandleBinaryOp(instruction);
1364}
1365
1366void LocationsBuilderARM64::VisitAnd(HAnd* instruction) {
1367 HandleBinaryOp(instruction);
1368}
1369
1370void InstructionCodeGeneratorARM64::VisitAnd(HAnd* instruction) {
1371 HandleBinaryOp(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01001372}
1373
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001374void LocationsBuilderARM64::VisitArrayGet(HArrayGet* instruction) {
1375 LocationSummary* locations =
1376 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1377 locations->SetInAt(0, Location::RequiresRegister());
1378 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1379 locations->SetOut(Location::RequiresRegister());
1380}
1381
1382void InstructionCodeGeneratorARM64::VisitArrayGet(HArrayGet* instruction) {
1383 LocationSummary* locations = instruction->GetLocations();
1384 Primitive::Type type = instruction->GetType();
1385 Register obj = InputRegisterAt(instruction, 0);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001386 Location index = locations->InAt(1);
1387 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001388 MemOperand source = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001389 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001390
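  // A constant index is folded into the data offset; a register index is scaled and added to the base in a temporary first.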
1391 if (index.IsConstant()) {
1392 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001393 source = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001394 } else {
1395 Register temp = temps.AcquireSameSizeAs(obj);
1396 Register index_reg = RegisterFrom(index, Primitive::kPrimInt);
1397 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001398 source = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001399 }
1400
Alexandre Rames67555f72014-11-18 10:55:16 +00001401 codegen_->Load(type, OutputCPURegister(instruction), source);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001402}
1403
Alexandre Rames5319def2014-10-23 10:03:10 +01001404void LocationsBuilderARM64::VisitArrayLength(HArrayLength* instruction) {
1405 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1406 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001407 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001408}
1409
1410void InstructionCodeGeneratorARM64::VisitArrayLength(HArrayLength* instruction) {
1411 __ Ldr(OutputRegister(instruction),
1412 HeapOperand(InputRegisterAt(instruction, 0), mirror::Array::LengthOffset()));
1413}
1414
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001415void LocationsBuilderARM64::VisitArraySet(HArraySet* instruction) {
1416 Primitive::Type value_type = instruction->GetComponentType();
1417 bool is_object = value_type == Primitive::kPrimNot;
1418 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1419 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1420 if (is_object) {
1421 InvokeRuntimeCallingConvention calling_convention;
1422 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1423 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1424 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1425 } else {
1426 locations->SetInAt(0, Location::RequiresRegister());
1427 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1428 locations->SetInAt(2, Location::RequiresRegister());
1429 }
1430}
1431
1432void InstructionCodeGeneratorARM64::VisitArraySet(HArraySet* instruction) {
1433 Primitive::Type value_type = instruction->GetComponentType();
1434 if (value_type == Primitive::kPrimNot) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001435 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001436 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001437 } else {
1438 LocationSummary* locations = instruction->GetLocations();
1439 Register obj = InputRegisterAt(instruction, 0);
Alexandre Rames67555f72014-11-18 10:55:16 +00001440 CPURegister value = InputCPURegisterAt(instruction, 2);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001441 Location index = locations->InAt(1);
1442 size_t offset = mirror::Array::DataOffset(Primitive::ComponentSize(value_type)).Uint32Value();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001443 MemOperand destination = HeapOperand(obj);
Alexandre Rames67555f72014-11-18 10:55:16 +00001444 UseScratchRegisterScope temps(GetVIXLAssembler());
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001445
1446 if (index.IsConstant()) {
1447 offset += Int64ConstantFrom(index) << Primitive::ComponentSizeShift(value_type);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001448 destination = HeapOperand(obj, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001449 } else {
1450 Register temp = temps.AcquireSameSizeAs(obj);
1451 Register index_reg = InputRegisterAt(instruction, 1);
1452 __ Add(temp, obj, Operand(index_reg, LSL, Primitive::ComponentSizeShift(value_type)));
Serban Constantinescu02164b32014-11-13 14:05:07 +00001453 destination = HeapOperand(temp, offset);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001454 }
1455
1456 codegen_->Store(value_type, value, destination);
1457 }
1458}
1459
Alexandre Rames67555f72014-11-18 10:55:16 +00001460void LocationsBuilderARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
1461 LocationSummary* locations =
1462 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1463 locations->SetInAt(0, Location::RequiresRegister());
1464 locations->SetInAt(1, Location::RequiresRegister());
1465 if (instruction->HasUses()) {
1466 locations->SetOut(Location::SameAsFirstInput());
1467 }
1468}
1469
1470void InstructionCodeGeneratorARM64::VisitBoundsCheck(HBoundsCheck* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001471 LocationSummary* locations = instruction->GetLocations();
1472 BoundsCheckSlowPathARM64* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM64(
1473 instruction, locations->InAt(0), locations->InAt(1));
Alexandre Rames67555f72014-11-18 10:55:16 +00001474 codegen_->AddSlowPath(slow_path);
1475
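  // A single unsigned comparison suffices: a negative index wraps to a large unsigned value, so 'hs' catches both index < 0 and index >= length.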
1476 __ Cmp(InputRegisterAt(instruction, 0), InputOperandAt(instruction, 1));
1477 __ B(slow_path->GetEntryLabel(), hs);
1478}
1479
1480void LocationsBuilderARM64::VisitCheckCast(HCheckCast* instruction) {
1481 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1482 instruction, LocationSummary::kCallOnSlowPath);
1483 locations->SetInAt(0, Location::RequiresRegister());
1484 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Rames3e69f162014-12-10 10:36:50 +00001485 locations->AddTemp(Location::RequiresRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00001486}
1487
1488void InstructionCodeGeneratorARM64::VisitCheckCast(HCheckCast* instruction) {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001489 LocationSummary* locations = instruction->GetLocations();
Alexandre Rames67555f72014-11-18 10:55:16 +00001490 Register obj = InputRegisterAt(instruction, 0);
 1491 Register cls = InputRegisterAt(instruction, 1);
Alexandre Rames3e69f162014-12-10 10:36:50 +00001492 Register obj_cls = WRegisterFrom(instruction->GetLocations()->GetTemp(0));
Alexandre Rames67555f72014-11-18 10:55:16 +00001493
Alexandre Rames3e69f162014-12-10 10:36:50 +00001494 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1495 instruction, locations->InAt(1), LocationFrom(obj_cls), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001496 codegen_->AddSlowPath(slow_path);
1497
1498 // TODO: avoid this check if we know obj is not null.
1499 __ Cbz(obj, slow_path->GetExitLabel());
1500 // Compare the class of `obj` with `cls`.
Alexandre Rames3e69f162014-12-10 10:36:50 +00001501 __ Ldr(obj_cls, HeapOperand(obj, mirror::Object::ClassOffset()));
1502 __ Cmp(obj_cls, cls);
Alexandre Rames67555f72014-11-18 10:55:16 +00001503 __ B(ne, slow_path->GetEntryLabel());
1504 __ Bind(slow_path->GetExitLabel());
1505}
1506
1507void LocationsBuilderARM64::VisitClinitCheck(HClinitCheck* check) {
1508 LocationSummary* locations =
1509 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1510 locations->SetInAt(0, Location::RequiresRegister());
1511 if (check->HasUses()) {
1512 locations->SetOut(Location::SameAsFirstInput());
1513 }
1514}
1515
1516void InstructionCodeGeneratorARM64::VisitClinitCheck(HClinitCheck* check) {
1517 // We assume the class is not null.
1518 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
1519 check->GetLoadClass(), check, check->GetDexPc(), true);
1520 codegen_->AddSlowPath(slow_path);
1521 GenerateClassInitializationCheck(slow_path, InputRegisterAt(check, 0));
1522}
1523
Serban Constantinescu02164b32014-11-13 14:05:07 +00001524void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001525 LocationSummary* locations =
Serban Constantinescu02164b32014-11-13 14:05:07 +00001526 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1527 Primitive::Type in_type = compare->InputAt(0)->GetType();
Alexandre Rames5319def2014-10-23 10:03:10 +01001528 switch (in_type) {
1529 case Primitive::kPrimLong: {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001530 locations->SetInAt(0, Location::RequiresRegister());
1531 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
1532 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1533 break;
1534 }
1535 case Primitive::kPrimFloat:
1536 case Primitive::kPrimDouble: {
1537 locations->SetInAt(0, Location::RequiresFpuRegister());
1538 locations->SetInAt(1, Location::RequiresFpuRegister());
1539 locations->SetOut(Location::RequiresRegister());
1540 break;
1541 }
1542 default:
1543 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1544 }
1545}
1546
1547void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
1548 Primitive::Type in_type = compare->InputAt(0)->GetType();
1549
1550 // 0 if: left == right
1551 // 1 if: left > right
1552 // -1 if: left < right
1553 switch (in_type) {
1554 case Primitive::kPrimLong: {
1555 Register result = OutputRegister(compare);
1556 Register left = InputRegisterAt(compare, 0);
1557 Operand right = InputOperandAt(compare, 1);
1558
1559 __ Cmp(left, right);
1560 __ Cset(result, ne);
1561 __ Cneg(result, result, lt);
1562 break;
1563 }
1564 case Primitive::kPrimFloat:
1565 case Primitive::kPrimDouble: {
1566 Register result = OutputRegister(compare);
1567 FPRegister left = InputFPRegisterAt(compare, 0);
1568 FPRegister right = InputFPRegisterAt(compare, 1);
1569
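  // If either operand is NaN, Fcmp sets the unordered flags; the gt-bias sequence below then yields 1 and the lt-bias sequence yields -1.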
1570 __ Fcmp(left, right);
1571 if (compare->IsGtBias()) {
1572 __ Cset(result, ne);
1573 } else {
1574 __ Csetm(result, ne);
1575 }
1576 __ Cneg(result, result, compare->IsGtBias() ? mi : gt);
Alexandre Rames5319def2014-10-23 10:03:10 +01001577 break;
1578 }
1579 default:
1580 LOG(FATAL) << "Unimplemented compare type " << in_type;
1581 }
1582}
1583
1584void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
1585 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1586 locations->SetInAt(0, Location::RequiresRegister());
1587 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1588 if (instruction->NeedsMaterialization()) {
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001589 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001590 }
1591}
1592
1593void InstructionCodeGeneratorARM64::VisitCondition(HCondition* instruction) {
1594 if (!instruction->NeedsMaterialization()) {
1595 return;
1596 }
1597
1598 LocationSummary* locations = instruction->GetLocations();
1599 Register lhs = InputRegisterAt(instruction, 0);
1600 Operand rhs = InputOperandAt(instruction, 1);
1601 Register res = RegisterFrom(locations->Out(), instruction->GetType());
1602 Condition cond = ARM64Condition(instruction->GetCondition());
1603
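  // Compare the inputs and materialize the condition as 0 or 1 in the output register.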
1604 __ Cmp(lhs, rhs);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001605 __ Cset(res, cond);
Alexandre Rames5319def2014-10-23 10:03:10 +01001606}
1607
1608#define FOR_EACH_CONDITION_INSTRUCTION(M) \
1609 M(Equal) \
1610 M(NotEqual) \
1611 M(LessThan) \
1612 M(LessThanOrEqual) \
1613 M(GreaterThan) \
1614 M(GreaterThanOrEqual)
1615#define DEFINE_CONDITION_VISITORS(Name) \
1616void LocationsBuilderARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); } \
1617void InstructionCodeGeneratorARM64::Visit##Name(H##Name* comp) { VisitCondition(comp); }
1618FOR_EACH_CONDITION_INSTRUCTION(DEFINE_CONDITION_VISITORS)
Alexandre Rames67555f72014-11-18 10:55:16 +00001619#undef DEFINE_CONDITION_VISITORS
Alexandre Rames5319def2014-10-23 10:03:10 +01001620#undef FOR_EACH_CONDITION_INSTRUCTION
1621
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001622void LocationsBuilderARM64::VisitDiv(HDiv* div) {
1623 LocationSummary* locations =
1624 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1625 switch (div->GetResultType()) {
1626 case Primitive::kPrimInt:
1627 case Primitive::kPrimLong:
1628 locations->SetInAt(0, Location::RequiresRegister());
1629 locations->SetInAt(1, Location::RequiresRegister());
1630 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1631 break;
1632
1633 case Primitive::kPrimFloat:
1634 case Primitive::kPrimDouble:
1635 locations->SetInAt(0, Location::RequiresFpuRegister());
1636 locations->SetInAt(1, Location::RequiresFpuRegister());
1637 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1638 break;
1639
1640 default:
1641 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1642 }
1643}
1644
1645void InstructionCodeGeneratorARM64::VisitDiv(HDiv* div) {
1646 Primitive::Type type = div->GetResultType();
1647 switch (type) {
1648 case Primitive::kPrimInt:
1649 case Primitive::kPrimLong:
1650 __ Sdiv(OutputRegister(div), InputRegisterAt(div, 0), InputRegisterAt(div, 1));
1651 break;
1652
1653 case Primitive::kPrimFloat:
1654 case Primitive::kPrimDouble:
1655 __ Fdiv(OutputFPRegister(div), InputFPRegisterAt(div, 0), InputFPRegisterAt(div, 1));
1656 break;
1657
1658 default:
1659 LOG(FATAL) << "Unexpected div type " << type;
1660 }
1661}
1662
Alexandre Rames67555f72014-11-18 10:55:16 +00001663void LocationsBuilderARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1664 LocationSummary* locations =
1665 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1666 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1667 if (instruction->HasUses()) {
1668 locations->SetOut(Location::SameAsFirstInput());
1669 }
1670}
1671
1672void InstructionCodeGeneratorARM64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1673 SlowPathCodeARM64* slow_path =
1674 new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM64(instruction);
1675 codegen_->AddSlowPath(slow_path);
1676 Location value = instruction->GetLocations()->InAt(0);
1677
Alexandre Rames3e69f162014-12-10 10:36:50 +00001678 Primitive::Type type = instruction->GetType();
1679
1680 if ((type != Primitive::kPrimInt) && (type != Primitive::kPrimLong)) {
 1681 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
1682 return;
1683 }
1684
Alexandre Rames67555f72014-11-18 10:55:16 +00001685 if (value.IsConstant()) {
1686 int64_t divisor = Int64ConstantFrom(value);
1687 if (divisor == 0) {
1688 __ B(slow_path->GetEntryLabel());
1689 } else {
Alexandre Rames3e69f162014-12-10 10:36:50 +00001690 // A division by a non-zero constant is valid. We don't need to perform
1691 // any check, so simply fall through.
Alexandre Rames67555f72014-11-18 10:55:16 +00001692 }
1693 } else {
1694 __ Cbz(InputRegisterAt(instruction, 0), slow_path->GetEntryLabel());
1695 }
1696}
1697
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001698void LocationsBuilderARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1699 LocationSummary* locations =
1700 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1701 locations->SetOut(Location::ConstantLocation(constant));
1702}
1703
1704void InstructionCodeGeneratorARM64::VisitDoubleConstant(HDoubleConstant* constant) {
1705 UNUSED(constant);
1706 // Will be generated at use site.
1707}
1708
Alexandre Rames5319def2014-10-23 10:03:10 +01001709void LocationsBuilderARM64::VisitExit(HExit* exit) {
1710 exit->SetLocations(nullptr);
1711}
1712
1713void InstructionCodeGeneratorARM64::VisitExit(HExit* exit) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001714 UNUSED(exit);
Alexandre Rames5319def2014-10-23 10:03:10 +01001715 if (kIsDebugBuild) {
1716 down_cast<Arm64Assembler*>(GetAssembler())->Comment("Unreachable");
Alexandre Rames67555f72014-11-18 10:55:16 +00001717 __ Brk(__LINE__); // TODO: Introduce special markers for such code locations.
Alexandre Rames5319def2014-10-23 10:03:10 +01001718 }
1719}
1720
Alexandre Ramesa89086e2014-11-07 17:13:25 +00001721void LocationsBuilderARM64::VisitFloatConstant(HFloatConstant* constant) {
1722 LocationSummary* locations =
1723 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1724 locations->SetOut(Location::ConstantLocation(constant));
1725}
1726
1727void InstructionCodeGeneratorARM64::VisitFloatConstant(HFloatConstant* constant) {
1728 UNUSED(constant);
1729 // Will be generated at use site.
1730}
1731
Alexandre Rames5319def2014-10-23 10:03:10 +01001732void LocationsBuilderARM64::VisitGoto(HGoto* got) {
1733 got->SetLocations(nullptr);
1734}
1735
1736void InstructionCodeGeneratorARM64::VisitGoto(HGoto* got) {
1737 HBasicBlock* successor = got->GetSuccessor();
Serban Constantinescu02164b32014-11-13 14:05:07 +00001738 DCHECK(!successor->IsExitBlock());
1739 HBasicBlock* block = got->GetBlock();
1740 HInstruction* previous = got->GetPrevious();
1741 HLoopInformation* info = block->GetLoopInformation();
1742
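  // For a loop back edge carrying a suspend check, GenerateSuspendCheck also handles the branch to the successor, so we can return early.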
1743 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
1744 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
1745 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1746 return;
1747 }
1748 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1749 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1750 }
1751 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001752 __ B(codegen_->GetLabelOf(successor));
1753 }
1754}
1755
1756void LocationsBuilderARM64::VisitIf(HIf* if_instr) {
1757 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1758 HInstruction* cond = if_instr->InputAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00001759 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001760 locations->SetInAt(0, Location::RequiresRegister());
1761 }
1762}
1763
1764void InstructionCodeGeneratorARM64::VisitIf(HIf* if_instr) {
1765 HInstruction* cond = if_instr->InputAt(0);
Alexandre Rames5319def2014-10-23 10:03:10 +01001766 HCondition* condition = cond->AsCondition();
1767 vixl::Label* true_target = codegen_->GetLabelOf(if_instr->IfTrueSuccessor());
1768 vixl::Label* false_target = codegen_->GetLabelOf(if_instr->IfFalseSuccessor());
1769
Serban Constantinescu02164b32014-11-13 14:05:07 +00001770 if (cond->IsIntConstant()) {
1771 int32_t cond_value = cond->AsIntConstant()->GetValue();
1772 if (cond_value == 1) {
1773 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
1774 __ B(true_target);
1775 }
1776 return;
1777 } else {
1778 DCHECK_EQ(cond_value, 0);
1779 }
1780 } else if (!cond->IsCondition() || condition->NeedsMaterialization()) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001781 // The condition instruction has been materialized; compare the output to 0.
1782 Location cond_val = if_instr->GetLocations()->InAt(0);
1783 DCHECK(cond_val.IsRegister());
1784 __ Cbnz(InputRegisterAt(if_instr, 0), true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001785 } else {
 1786 // The condition instruction has not been materialized; use its inputs as
1787 // the comparison and its condition as the branch condition.
1788 Register lhs = InputRegisterAt(condition, 0);
1789 Operand rhs = InputOperandAt(condition, 1);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001790 Condition arm64_cond = ARM64Condition(condition->GetCondition());
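      // An (in)equality comparison against zero can use cbz/cbnz instead of cmp + conditional branch.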
1791 if ((arm64_cond == eq || arm64_cond == ne) && rhs.IsImmediate() && (rhs.immediate() == 0)) {
1792 if (arm64_cond == eq) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001793 __ Cbz(lhs, true_target);
1794 } else {
1795 __ Cbnz(lhs, true_target);
1796 }
1797 } else {
1798 __ Cmp(lhs, rhs);
Andreas Gampe277ccbd2014-11-03 21:36:10 -08001799 __ B(arm64_cond, true_target);
Alexandre Rames5319def2014-10-23 10:03:10 +01001800 }
1801 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001802 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
1803 __ B(false_target);
1804 }
1805}
1806
1807void LocationsBuilderARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001808 LocationSummary* locations =
1809 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001810 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00001811 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01001812}
1813
1814void InstructionCodeGeneratorARM64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00001815 MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001816
1817 if (instruction->IsVolatile()) {
1818 if (kUseAcquireRelease) {
1819 codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
1820 } else {
1821 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1822 // For IRIW sequential consistency kLoadAny is not sufficient.
1823 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1824 }
1825 } else {
1826 codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
1827 }
Alexandre Rames5319def2014-10-23 10:03:10 +01001828}
1829
1830void LocationsBuilderARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001831 LocationSummary* locations =
1832 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexandre Rames5319def2014-10-23 10:03:10 +01001833 locations->SetInAt(0, Location::RequiresRegister());
1834 locations->SetInAt(1, Location::RequiresRegister());
1835}
1836
1837void InstructionCodeGeneratorARM64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Alexandre Ramesfc19de82014-11-07 17:13:31 +00001838 Register obj = InputRegisterAt(instruction, 0);
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00001839 CPURegister value = InputCPURegisterAt(instruction, 1);
1840 Offset offset = instruction->GetFieldOffset();
1841 Primitive::Type field_type = instruction->GetFieldType();
1842
1843 if (instruction->IsVolatile()) {
1844 if (kUseAcquireRelease) {
1845 codegen_->StoreRelease(field_type, value, HeapOperand(obj, offset));
1846 } else {
1847 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
1848 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1849 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
1850 }
1851 } else {
1852 codegen_->Store(field_type, value, HeapOperand(obj, offset));
1853 }
1854
1855 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001856 codegen_->MarkGCCard(obj, Register(value));
Alexandre Rames5319def2014-10-23 10:03:10 +01001857 }
1858}
1859
Alexandre Rames67555f72014-11-18 10:55:16 +00001860void LocationsBuilderARM64::VisitInstanceOf(HInstanceOf* instruction) {
1861 LocationSummary::CallKind call_kind =
1862 instruction->IsClassFinal() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
1863 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
1864 locations->SetInAt(0, Location::RequiresRegister());
1865 locations->SetInAt(1, Location::RequiresRegister());
1866 locations->SetOut(Location::RequiresRegister(), true); // The output does overlap inputs.
1867}
1868
1869void InstructionCodeGeneratorARM64::VisitInstanceOf(HInstanceOf* instruction) {
1870 LocationSummary* locations = instruction->GetLocations();
 1871 Register obj = InputRegisterAt(instruction, 0);
 1872 Register cls = InputRegisterAt(instruction, 1);
1873 Register out = OutputRegister(instruction);
1874
1875 vixl::Label done;
1876
1877 // Return 0 if `obj` is null.
1878 // TODO: Avoid this check if we know `obj` is not null.
1879 __ Mov(out, 0);
1880 __ Cbz(obj, &done);
1881
1882 // Compare the class of `obj` with `cls`.
Serban Constantinescu02164b32014-11-13 14:05:07 +00001883 __ Ldr(out, HeapOperand(obj, mirror::Object::ClassOffset()));
Alexandre Rames67555f72014-11-18 10:55:16 +00001884 __ Cmp(out, cls);
1885 if (instruction->IsClassFinal()) {
1886 // Classes must be equal for the instanceof to succeed.
1887 __ Cset(out, eq);
1888 } else {
1889 // If the classes are not equal, we go into a slow path.
1890 DCHECK(locations->OnlyCallsOnSlowPath());
1891 SlowPathCodeARM64* slow_path =
Alexandre Rames3e69f162014-12-10 10:36:50 +00001892 new (GetGraph()->GetArena()) TypeCheckSlowPathARM64(
1893 instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
Alexandre Rames67555f72014-11-18 10:55:16 +00001894 codegen_->AddSlowPath(slow_path);
1895 __ B(ne, slow_path->GetEntryLabel());
1896 __ Mov(out, 1);
1897 __ Bind(slow_path->GetExitLabel());
1898 }
1899
1900 __ Bind(&done);
1901}
1902
Alexandre Rames5319def2014-10-23 10:03:10 +01001903void LocationsBuilderARM64::VisitIntConstant(HIntConstant* constant) {
1904 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
1905 locations->SetOut(Location::ConstantLocation(constant));
1906}
1907
1908void InstructionCodeGeneratorARM64::VisitIntConstant(HIntConstant* constant) {
1909 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001910 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01001911}
1912
Alexandre Rames5319def2014-10-23 10:03:10 +01001913void LocationsBuilderARM64::HandleInvoke(HInvoke* invoke) {
1914 LocationSummary* locations =
1915 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1916 locations->AddTemp(LocationFrom(x0));
1917
1918 InvokeDexCallingConventionVisitor calling_convention_visitor;
1919 for (size_t i = 0; i < invoke->InputCount(); i++) {
1920 HInstruction* input = invoke->InputAt(i);
1921 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1922 }
1923
1924 Primitive::Type return_type = invoke->GetType();
1925 if (return_type != Primitive::kPrimVoid) {
1926 locations->SetOut(calling_convention_visitor.GetReturnLocation(return_type));
1927 }
1928}
1929
Alexandre Rames67555f72014-11-18 10:55:16 +00001930void LocationsBuilderARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1931 HandleInvoke(invoke);
1932}
1933
1934void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invoke) {
1935 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1936 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1937 uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1938 (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1939 Location receiver = invoke->GetLocations()->InAt(0);
1940 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00001941 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames67555f72014-11-18 10:55:16 +00001942
1943 // The register ip1 is required to be used for the hidden argument in
1944 // art_quick_imt_conflict_trampoline, so prevent VIXL from using it.
1945 UseScratchRegisterScope scratch_scope(GetVIXLAssembler());
1946 scratch_scope.Exclude(ip1);
1947 __ Mov(ip1, invoke->GetDexMethodIndex());
1948
1949 // temp = object->GetClass();
1950 if (receiver.IsStackSlot()) {
1951 __ Ldr(temp, StackOperandFrom(receiver));
1952 __ Ldr(temp, HeapOperand(temp, class_offset));
1953 } else {
1954 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
1955 }
1956 // temp = temp->GetImtEntryAt(method_offset);
1957 __ Ldr(temp, HeapOperand(temp, method_offset));
1958 // lr = temp->GetEntryPoint();
1959 __ Ldr(lr, HeapOperand(temp, entry_point));
1960 // lr();
1961 __ Blr(lr);
1962 DCHECK(!codegen_->IsLeafMethod());
1963 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1964}
1965
1966void LocationsBuilderARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1967 HandleInvoke(invoke);
1968}
1969
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001970void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames67555f72014-11-18 10:55:16 +00001971 HandleInvoke(invoke);
1972}
1973
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00001974void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
Alexandre Rames5319def2014-10-23 10:03:10 +01001975 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
1976 // Make sure that ArtMethod* is passed in W0 as per the calling convention
1977 DCHECK(temp.Is(w0));
1978 size_t index_in_cache = mirror::Array::DataOffset(kHeapRefSize).SizeValue() +
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001979 invoke->GetDexMethodIndex() * kHeapRefSize;
Alexandre Rames5319def2014-10-23 10:03:10 +01001980
1981 // TODO: Implement all kinds of calls:
1982 // 1) boot -> boot
1983 // 2) app -> boot
1984 // 3) app -> app
1985 //
1986 // Currently we implement the app -> app logic, which looks up in the resolve cache.
1987
1988 // temp = method;
Alexandre Rames67555f72014-11-18 10:55:16 +00001989 codegen_->LoadCurrentMethod(temp);
Nicolas Geoffray4e44c822014-12-17 12:25:12 +00001990 // temp = temp->dex_cache_resolved_methods_;
1991 __ Ldr(temp, HeapOperand(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset()));
1992 // temp = temp[index_in_cache];
1993 __ Ldr(temp, HeapOperand(temp, index_in_cache));
Alexandre Rames5319def2014-10-23 10:03:10 +01001994 // lr = temp->entry_point_from_quick_compiled_code_;
Serban Constantinescu02164b32014-11-13 14:05:07 +00001995 __ Ldr(lr, HeapOperand(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1996 kArm64WordSize)));
Alexandre Rames5319def2014-10-23 10:03:10 +01001997 // lr();
1998 __ Blr(lr);
1999
2000 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2001 DCHECK(!codegen_->IsLeafMethod());
2002}
2003
2004void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
2005 LocationSummary* locations = invoke->GetLocations();
2006 Location receiver = locations->InAt(0);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002007 Register temp = WRegisterFrom(invoke->GetLocations()->GetTemp(0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002008 size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
2009 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
2010 Offset class_offset = mirror::Object::ClassOffset();
Nicolas Geoffray86a8d7a2014-11-19 08:47:18 +00002011 Offset entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize);
Alexandre Rames5319def2014-10-23 10:03:10 +01002012
2013 // temp = object->GetClass();
2014 if (receiver.IsStackSlot()) {
Serban Constantinescu02164b32014-11-13 14:05:07 +00002015 __ Ldr(temp, MemOperand(sp, receiver.GetStackIndex()));
2016 __ Ldr(temp, HeapOperand(temp, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002017 } else {
2018 DCHECK(receiver.IsRegister());
Serban Constantinescu02164b32014-11-13 14:05:07 +00002019 __ Ldr(temp, HeapOperandFrom(receiver, class_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002020 }
2021 // temp = temp->GetMethodAt(method_offset);
Serban Constantinescu02164b32014-11-13 14:05:07 +00002022 __ Ldr(temp, HeapOperand(temp, method_offset));
Alexandre Rames5319def2014-10-23 10:03:10 +01002023 // lr = temp->GetEntryPoint();
Serban Constantinescu02164b32014-11-13 14:05:07 +00002024 __ Ldr(lr, HeapOperand(temp, entry_point.SizeValue()));
Alexandre Rames5319def2014-10-23 10:03:10 +01002025 // lr();
2026 __ Blr(lr);
2027 DCHECK(!codegen_->IsLeafMethod());
2028 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2029}
2030
Alexandre Rames67555f72014-11-18 10:55:16 +00002031void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
2032 LocationSummary::CallKind call_kind = cls->CanCallRuntime() ? LocationSummary::kCallOnSlowPath
2033 : LocationSummary::kNoCall;
2034 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2035 locations->SetOut(Location::RequiresRegister());
2036}
2037
2038void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
2039 Register out = OutputRegister(cls);
2040 if (cls->IsReferrersClass()) {
2041 DCHECK(!cls->CanCallRuntime());
2042 DCHECK(!cls->MustGenerateClinitCheck());
2043 codegen_->LoadCurrentMethod(out);
2044 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2045 } else {
2046 DCHECK(cls->CanCallRuntime());
2047 codegen_->LoadCurrentMethod(out);
2048 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DexCacheResolvedTypesOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002049 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002050
2051 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
2052 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
2053 codegen_->AddSlowPath(slow_path);
2054 __ Cbz(out, slow_path->GetEntryLabel());
2055 if (cls->MustGenerateClinitCheck()) {
2056 GenerateClassInitializationCheck(slow_path, out);
2057 } else {
2058 __ Bind(slow_path->GetExitLabel());
2059 }
2060 }
2061}
2062
2063void LocationsBuilderARM64::VisitLoadException(HLoadException* load) {
2064 LocationSummary* locations =
2065 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2066 locations->SetOut(Location::RequiresRegister());
2067}
2068
2069void InstructionCodeGeneratorARM64::VisitLoadException(HLoadException* instruction) {
2070 MemOperand exception = MemOperand(tr, Thread::ExceptionOffset<kArm64WordSize>().Int32Value());
2071 __ Ldr(OutputRegister(instruction), exception);
2072 __ Str(wzr, exception);
2073}
2074
Alexandre Rames5319def2014-10-23 10:03:10 +01002075void LocationsBuilderARM64::VisitLoadLocal(HLoadLocal* load) {
2076 load->SetLocations(nullptr);
2077}
2078
2079void InstructionCodeGeneratorARM64::VisitLoadLocal(HLoadLocal* load) {
2080 // Nothing to do, this is driven by the code generator.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002081 UNUSED(load);
Alexandre Rames5319def2014-10-23 10:03:10 +01002082}
2083
Alexandre Rames67555f72014-11-18 10:55:16 +00002084void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
2085 LocationSummary* locations =
2086 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2087 locations->SetOut(Location::RequiresRegister());
2088}
2089
2090void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) {
2091 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load);
2092 codegen_->AddSlowPath(slow_path);
2093
2094 Register out = OutputRegister(load);
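  // Walk current method -> declaring class -> dex cache strings -> entry for this string index; a null entry means the string is unresolved and we take the slow path.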
2095 codegen_->LoadCurrentMethod(out);
Mathieu Chartiereace4582014-11-24 18:29:54 -08002096 __ Ldr(out, HeapOperand(out, mirror::ArtMethod::DeclaringClassOffset()));
2097 __ Ldr(out, HeapOperand(out, mirror::Class::DexCacheStringsOffset()));
Serban Constantinescu02164b32014-11-13 14:05:07 +00002098 __ Ldr(out, HeapOperand(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
Alexandre Rames67555f72014-11-18 10:55:16 +00002099 __ Cbz(out, slow_path->GetEntryLabel());
2100 __ Bind(slow_path->GetExitLabel());
2101}
2102
Alexandre Rames5319def2014-10-23 10:03:10 +01002103void LocationsBuilderARM64::VisitLocal(HLocal* local) {
2104 local->SetLocations(nullptr);
2105}
2106
2107void InstructionCodeGeneratorARM64::VisitLocal(HLocal* local) {
2108 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
2109}
2110
2111void LocationsBuilderARM64::VisitLongConstant(HLongConstant* constant) {
2112 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2113 locations->SetOut(Location::ConstantLocation(constant));
2114}
2115
2116void InstructionCodeGeneratorARM64::VisitLongConstant(HLongConstant* constant) {
2117 // Will be generated at use site.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002118 UNUSED(constant);
Alexandre Rames5319def2014-10-23 10:03:10 +01002119}
2120
Alexandre Rames67555f72014-11-18 10:55:16 +00002121void LocationsBuilderARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2122 LocationSummary* locations =
2123 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2124 InvokeRuntimeCallingConvention calling_convention;
2125 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
2126}
2127
2128void InstructionCodeGeneratorARM64::VisitMonitorOperation(HMonitorOperation* instruction) {
2129 codegen_->InvokeRuntime(instruction->IsEnter()
2130 ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
2131 instruction,
2132 instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002133 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
Alexandre Rames67555f72014-11-18 10:55:16 +00002134}
2135
Alexandre Rames42d641b2014-10-27 14:00:51 +00002136void LocationsBuilderARM64::VisitMul(HMul* mul) {
2137 LocationSummary* locations =
2138 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
2139 switch (mul->GetResultType()) {
2140 case Primitive::kPrimInt:
2141 case Primitive::kPrimLong:
2142 locations->SetInAt(0, Location::RequiresRegister());
2143 locations->SetInAt(1, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002144 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002145 break;
2146
2147 case Primitive::kPrimFloat:
2148 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002149 locations->SetInAt(0, Location::RequiresFpuRegister());
2150 locations->SetInAt(1, Location::RequiresFpuRegister());
Alexandre Rames67555f72014-11-18 10:55:16 +00002151 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Rames42d641b2014-10-27 14:00:51 +00002152 break;
2153
2154 default:
2155 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2156 }
2157}
2158
2159void InstructionCodeGeneratorARM64::VisitMul(HMul* mul) {
2160 switch (mul->GetResultType()) {
2161 case Primitive::kPrimInt:
2162 case Primitive::kPrimLong:
2163 __ Mul(OutputRegister(mul), InputRegisterAt(mul, 0), InputRegisterAt(mul, 1));
2164 break;
2165
2166 case Primitive::kPrimFloat:
2167 case Primitive::kPrimDouble:
Alexandre Ramesa89086e2014-11-07 17:13:25 +00002168 __ Fmul(OutputFPRegister(mul), InputFPRegisterAt(mul, 0), InputFPRegisterAt(mul, 1));
Alexandre Rames42d641b2014-10-27 14:00:51 +00002169 break;
2170
2171 default:
2172 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2173 }
2174}
2175
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002176void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
2177 LocationSummary* locations =
2178 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2179 switch (neg->GetResultType()) {
2180 case Primitive::kPrimInt:
Alexandre Rames67555f72014-11-18 10:55:16 +00002181 case Primitive::kPrimLong:
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002182 locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
Alexandre Rames67555f72014-11-18 10:55:16 +00002183 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002184 break;
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002185
2186 case Primitive::kPrimFloat:
2187 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002188 locations->SetInAt(0, Location::RequiresFpuRegister());
2189 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002190 break;
2191
2192 default:
2193 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2194 }
2195}
2196
2197void InstructionCodeGeneratorARM64::VisitNeg(HNeg* neg) {
2198 switch (neg->GetResultType()) {
2199 case Primitive::kPrimInt:
2200 case Primitive::kPrimLong:
2201 __ Neg(OutputRegister(neg), InputOperandAt(neg, 0));
2202 break;
2203
2204 case Primitive::kPrimFloat:
2205 case Primitive::kPrimDouble:
Alexandre Rames67555f72014-11-18 10:55:16 +00002206 __ Fneg(OutputFPRegister(neg), InputFPRegisterAt(neg, 0));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002207 break;
2208
2209 default:
2210 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2211 }
2212}
2213
2214void LocationsBuilderARM64::VisitNewArray(HNewArray* instruction) {
2215 LocationSummary* locations =
2216 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2217 InvokeRuntimeCallingConvention calling_convention;
2218 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002219 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002220 locations->SetOut(LocationFrom(x0));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002221 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(1)));
2222 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2223 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002224}
2225
2226void InstructionCodeGeneratorARM64::VisitNewArray(HNewArray* instruction) {
2227 LocationSummary* locations = instruction->GetLocations();
2228 InvokeRuntimeCallingConvention calling_convention;
2229 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2230 DCHECK(type_index.Is(w0));
2231 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002232 DCHECK(current_method.Is(w2));
Alexandre Rames67555f72014-11-18 10:55:16 +00002233 codegen_->LoadCurrentMethod(current_method);
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002234 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002235 codegen_->InvokeRuntime(
2236 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002237 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck,
2238 void*, uint32_t, int32_t, mirror::ArtMethod*>();
Alexandre Ramesfc19de82014-11-07 17:13:31 +00002239}
2240
Alexandre Rames5319def2014-10-23 10:03:10 +01002241void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
2242 LocationSummary* locations =
2243 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2244 InvokeRuntimeCallingConvention calling_convention;
2245 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
2246 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
2247 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002248 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002249}
2250
2251void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction) {
2252 LocationSummary* locations = instruction->GetLocations();
2253 Register type_index = RegisterFrom(locations->GetTemp(0), Primitive::kPrimInt);
2254 DCHECK(type_index.Is(w0));
2255 Register current_method = RegisterFrom(locations->GetTemp(1), Primitive::kPrimNot);
2256 DCHECK(current_method.Is(w1));
Alexandre Rames67555f72014-11-18 10:55:16 +00002257 codegen_->LoadCurrentMethod(current_method);
Alexandre Rames5319def2014-10-23 10:03:10 +01002258 __ Mov(type_index, instruction->GetTypeIndex());
Alexandre Rames67555f72014-11-18 10:55:16 +00002259 codegen_->InvokeRuntime(
2260 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002261 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, mirror::ArtMethod*>();
Alexandre Rames5319def2014-10-23 10:03:10 +01002262}
2263
2264void LocationsBuilderARM64::VisitNot(HNot* instruction) {
2265 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexandre Rames4e596512014-11-07 15:56:50 +00002266 locations->SetInAt(0, Location::RequiresRegister());
Alexandre Ramesfb4e5fa2014-11-06 12:41:16 +00002267 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexandre Rames5319def2014-10-23 10:03:10 +01002268}
2269
2270void InstructionCodeGeneratorARM64::VisitNot(HNot* instruction) {
2271 switch (instruction->InputAt(0)->GetType()) {
2272 case Primitive::kPrimBoolean:
2273 __ Eor(OutputRegister(instruction), InputRegisterAt(instruction, 0), Operand(1));
2274 break;
2275
2276 case Primitive::kPrimInt:
Alexandre Rames5319def2014-10-23 10:03:10 +01002277 case Primitive::kPrimLong:
Roland Levillain55dcfb52014-10-24 18:09:09 +01002278 __ Mvn(OutputRegister(instruction), InputOperandAt(instruction, 0));
Alexandre Rames5319def2014-10-23 10:03:10 +01002279 break;
2280
2281 default:
2282 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
2283 }
2284}
2285
2286void LocationsBuilderARM64::VisitNullCheck(HNullCheck* instruction) {
2287 LocationSummary* locations =
2288 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2289 locations->SetInAt(0, Location::RequiresRegister());
2290 if (instruction->HasUses()) {
2291 locations->SetOut(Location::SameAsFirstInput());
2292 }
2293}
2294
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002295void InstructionCodeGeneratorARM64::GenerateImplicitNullCheck(HNullCheck* instruction) {
2296 Location obj = instruction->GetLocations()->InAt(0);
2297
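  // A load from [obj, #0] into wzr faults if obj is null; the runtime's fault handler turns that fault into a NullPointerException using the PC info recorded below.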
2298 __ Ldr(wzr, HeapOperandFrom(obj, Offset(0)));
2299 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2300}
2301
2302void InstructionCodeGeneratorARM64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexandre Rames5319def2014-10-23 10:03:10 +01002303 SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM64(instruction);
2304 codegen_->AddSlowPath(slow_path);
2305
2306 LocationSummary* locations = instruction->GetLocations();
2307 Location obj = locations->InAt(0);
2308 if (obj.IsRegister()) {
2309 __ Cbz(RegisterFrom(obj, instruction->InputAt(0)->GetType()), slow_path->GetEntryLabel());
2310 } else {
2311 DCHECK(obj.IsConstant()) << obj;
2312 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2313 __ B(slow_path->GetEntryLabel());
2314 }
2315}
2316
Calin Juravlecd6dffe2015-01-08 17:35:35 +00002317void InstructionCodeGeneratorARM64::VisitNullCheck(HNullCheck* instruction) {
2318 if (codegen_->GetCompilerOptions().GetImplicitNullChecks()) {
2319 GenerateImplicitNullCheck(instruction);
2320 } else {
2321 GenerateExplicitNullCheck(instruction);
2322 }
2323}
2324
Alexandre Rames67555f72014-11-18 10:55:16 +00002325void LocationsBuilderARM64::VisitOr(HOr* instruction) {
2326 HandleBinaryOp(instruction);
2327}
2328
2329void InstructionCodeGeneratorARM64::VisitOr(HOr* instruction) {
2330 HandleBinaryOp(instruction);
2331}
2332
Alexandre Rames3e69f162014-12-10 10:36:50 +00002333void LocationsBuilderARM64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
2334 LOG(FATAL) << "Unreachable";
2335}
2336
2337void InstructionCodeGeneratorARM64::VisitParallelMove(HParallelMove* instruction) {
2338 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
2339}
2340
Alexandre Rames5319def2014-10-23 10:03:10 +01002341void LocationsBuilderARM64::VisitParameterValue(HParameterValue* instruction) {
2342 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2343 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2344 if (location.IsStackSlot()) {
2345 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2346 } else if (location.IsDoubleStackSlot()) {
2347 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2348 }
2349 locations->SetOut(location);
2350}
2351
2352void InstructionCodeGeneratorARM64::VisitParameterValue(HParameterValue* instruction) {
2353 // Nothing to do, the parameter is already at its location.
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002354 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002355}
2356
2357void LocationsBuilderARM64::VisitPhi(HPhi* instruction) {
2358 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2359 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2360 locations->SetInAt(i, Location::Any());
2361 }
2362 locations->SetOut(Location::Any());
2363}
2364
2365void InstructionCodeGeneratorARM64::VisitPhi(HPhi* instruction) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002366 UNUSED(instruction);
Alexandre Rames5319def2014-10-23 10:03:10 +01002367 LOG(FATAL) << "Unreachable";
2368}
2369
Serban Constantinescu02164b32014-11-13 14:05:07 +00002370void LocationsBuilderARM64::VisitRem(HRem* rem) {
Serban Constantinescu02d81cc2015-01-05 16:08:49 +00002371 Primitive::Type type = rem->GetResultType();
2372 LocationSummary::CallKind call_kind = IsFPType(type) ? LocationSummary::kCall
2373 : LocationSummary::kNoCall;
2374 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2375
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorARM64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      UseScratchRegisterScope temps(GetVIXLAssembler());
      Register dividend = InputRegisterAt(rem, 0);
      Register divisor = InputRegisterAt(rem, 1);
      Register output = OutputRegister(rem);
      Register temp = temps.AcquireSameSizeAs(output);

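      // ARM64 has no integer remainder instruction: compute
      // dividend - (dividend / divisor) * divisor with sdiv followed by msub.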
      __ Sdiv(temp, dividend, divisor);
      __ Msub(output, temp, divisor, dividend);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
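      // Inputs and output were set up for the runtime calling convention, so
      // the remainder reduces to a call to the fmodf/fmod entry point.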
      int32_t entry_offset = (type == Primitive::kPrimFloat) ? QUICK_ENTRY_POINT(pFmodf)
                                                             : QUICK_ENTRY_POINT(pFmod);
      codegen_->InvokeRuntime(entry_offset, rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM64::VisitReturn(HReturn* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type return_type = instruction->InputAt(0)->GetType();
  locations->SetInAt(0, ARM64ReturnLocation(return_type));
}

void InstructionCodeGeneratorARM64::VisitReturn(HReturn* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitReturnVoid(HReturnVoid* instruction) {
  instruction->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitReturnVoid(HReturnVoid* instruction) {
  UNUSED(instruction);
  codegen_->GenerateFrameExit();
  __ Ret();
}

void LocationsBuilderARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  Primitive::Type field_type = store->InputAt(1)->GetType();
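  // 64-bit values (long, double) take a double stack slot; every other type
  // fits in a single slot.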
  switch (field_type) {
    case Primitive::kPrimNot:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << field_type;
  }
}

void InstructionCodeGeneratorARM64::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

void LocationsBuilderARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  MemOperand field = HeapOperand(InputRegisterAt(instruction, 0), instruction->GetFieldOffset());

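  // Volatile gets use either a load-acquire or a plain load followed by a full
  // barrier, depending on kUseAcquireRelease.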
  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->LoadAcquire(instruction->GetType(), OutputCPURegister(instruction), field);
    } else {
      codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
      // For IRIW sequential consistency kLoadAny is not sufficient.
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Load(instruction->GetType(), OutputCPURegister(instruction), field);
  }
}

void LocationsBuilderARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void InstructionCodeGeneratorARM64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  Register cls = InputRegisterAt(instruction, 0);
  CPURegister value = InputCPURegisterAt(instruction, 1);
  Offset offset = instruction->GetFieldOffset();
  Primitive::Type field_type = instruction->GetFieldType();

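  // Volatile sets use either a store-release or a plain store bracketed by
  // barriers, depending on kUseAcquireRelease.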
  if (instruction->IsVolatile()) {
    if (kUseAcquireRelease) {
      codegen_->StoreRelease(field_type, value, HeapOperand(cls, offset));
    } else {
      GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
      codegen_->Store(field_type, value, HeapOperand(cls, offset));
      GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
    }
  } else {
    codegen_->Store(field_type, value, HeapOperand(cls, offset));
  }

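  // Object reference stores also need a GC write barrier (card mark).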
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    codegen_->MarkGCCard(cls, Register(value));
  }
}

void LocationsBuilderARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderARM64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM64::VisitThrow(HThrow* instruction) {
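  // Exception delivery is handled entirely by the runtime; the exception
  // object is already in the first runtime argument register.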
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderARM64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

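  // Pick FP or core registers for the input and the output based on the types
  // involved; no conversion here requires a runtime call.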
  if (IsFPType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (IsFPType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (IsIntegralType(result_type) && IsIntegralType(input_type)) {
    int result_size = Primitive::ComponentSize(result_type);
    int input_size = Primitive::ComponentSize(input_type);
    int min_size = std::min(result_size, input_size);
    Register output = OutputRegister(conversion);
    Register source = InputRegisterAt(conversion, 0);
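    // Integral conversions reduce to a bit-field extract: conversions to char
    // or widenings from char zero-extend (ubfx); everything else sign-extends
    // (sbfx).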
    if ((result_type == Primitive::kPrimChar) && (input_size < result_size)) {
      __ Ubfx(output, source, 0, result_size * kBitsPerByte);
    } else if ((result_type == Primitive::kPrimChar) ||
               ((input_type == Primitive::kPrimChar) && (result_size > input_size))) {
      __ Ubfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    } else {
      __ Sbfx(output, output.IsX() ? source.X() : source.W(), 0, min_size * kBitsPerByte);
    }
  } else if (IsFPType(result_type) && IsIntegralType(input_type)) {
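    // Integer to floating point: scvtf converts from a signed 32- or 64-bit
    // source register.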
    __ Scvtf(OutputFPRegister(conversion), InputRegisterAt(conversion, 0));
  } else if (IsIntegralType(result_type) && IsFPType(input_type)) {
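    // Floating point to integer: fcvtzs rounds toward zero and saturates,
    // matching Java narrowing semantics (NaN becomes 0).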
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    __ Fcvtzs(OutputRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else if (IsFPType(result_type) && IsFPType(input_type)) {
    __ Fcvt(OutputFPRegister(conversion), InputFPRegisterAt(conversion, 0));
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}

void LocationsBuilderARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorARM64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

#undef __
#undef QUICK_ENTRY_POINT

}  // namespace arm64
}  // namespace art