/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm64.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

namespace art {
namespace arm64 {

#ifdef ___
#error "ARM64 Assembler macro already defined."
#else
#define ___   vixl_masm_.
#endif
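
// Each '___' below expands to 'vixl_masm_.', so the assembler mnemonics that
// follow are forwarded to the wrapped VIXL MacroAssembler.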

void Arm64Assembler::FinalizeCode() {
  ___ FinalizeCode();
}

size_t Arm64Assembler::CodeSize() const {
  return vixl_masm_.GetSizeOfCodeGenerated();
}

const uint8_t* Arm64Assembler::CodeBufferBaseAddress() const {
  return vixl_masm_.GetStartAddress<uint8_t*>();
}

void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  // Copy the instructions from the buffer.
  MemoryRegion from(vixl_masm_.GetStartAddress<void*>(), CodeSize());
  region.CopyFrom(0, from);
}
51
Serban Constantinescued8dd492014-02-11 14:15:10 +000052void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) {
53 Arm64ManagedRegister dst = m_dst.AsArm64();
54 Arm64ManagedRegister base = m_base.AsArm64();
Alexandre Rames37c92df2014-10-17 14:35:27 +010055 CHECK(dst.IsXRegister() && base.IsXRegister());
Serban Constantinescu0f89dac2014-05-08 13:52:53 +010056 // Remove dst and base form the temp list - higher level API uses IP1, IP0.
Alexandre Rames087930f2016-08-02 13:45:28 +010057 UseScratchRegisterScope temps(&vixl_masm_);
Alexandre Rames37c92df2014-10-17 14:35:27 +010058 temps.Exclude(reg_x(dst.AsXRegister()), reg_x(base.AsXRegister()));
59 ___ Ldr(reg_x(dst.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
Serban Constantinescued8dd492014-02-11 14:15:10 +000060}

void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  // Remove base and scratch from the temp list - the higher level API uses IP1, IP0.
  UseScratchRegisterScope temps(&vixl_masm_);
  temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister()));
  ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
  ___ Br(reg_x(scratch.AsXRegister()));
}

static inline dwarf::Reg DWARFReg(CPURegister reg) {
  if (reg.IsFPRegister()) {
    return dwarf::Reg::Arm64Fp(reg.GetCode());
  } else {
    DCHECK_LT(reg.GetCode(), 31u);  // X0 - X30.
    return dwarf::Reg::Arm64Core(reg.GetCode());
  }
}
82
Scott Wakeling97c72b72016-06-24 16:19:36 +010083void Arm64Assembler::SpillRegisters(CPURegList registers, int offset) {
84 int size = registers.GetRegisterSizeInBytes();
Alexandre Rames087930f2016-08-02 13:45:28 +010085 const Register sp = vixl_masm_.StackPointer();
Anton Kirilovbde6ae12016-06-10 17:46:12 +010086 // Since we are operating on register pairs, we would like to align on
87 // double the standard size; on the other hand, we don't want to insert
88 // an extra store, which will happen if the number of registers is even.
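  // For example (illustrative values): spilling three 8-byte registers at
  // offset 8 emits one Str to bring the offset to 16, after which the
  // remaining pair is covered by a single 16-byte-aligned Stp.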
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Stp(dst0, dst1, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    cfi_.RelOffset(DWARFReg(dst1), offset + size);
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
  }
  DCHECK(registers.IsEmpty());
}

void Arm64Assembler::UnspillRegisters(CPURegList registers, int offset) {
  int size = registers.GetRegisterSizeInBytes();
  const Register sp = vixl_masm_.StackPointer();
  // Be consistent with the logic for spilling registers.
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Ldp(dst0, dst1, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    cfi_.Restore(DWARFReg(dst1));
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
  }
  DCHECK(registers.IsEmpty());
}
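
// Usage sketch (illustrative only, not part of this file): a caller could
// spill a callee-save range into its frame and restore it later, assuming
// VIXL's CPURegList(type, size, first, last) constructor:
//
//   CPURegList regs(CPURegister::kRegister, kXRegSize, 19, 28);  // X19-X28.
//   assembler->SpillRegisters(regs, frame_offset);
//   ...  // Code that is then free to use X19-X28.
//   assembler->UnspillRegisters(regs, frame_offset);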

void Arm64Assembler::PoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}

void Arm64Assembler::UnpoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}
148
Roland Levillain0b671c02016-08-19 12:02:34 +0100149void Arm64Assembler::MaybePoisonHeapReference(Register reg) {
150 if (kPoisonHeapReferences) {
151 PoisonHeapReference(reg);
152 }
153}
154
Scott Wakeling97c72b72016-06-24 16:19:36 +0100155void Arm64Assembler::MaybeUnpoisonHeapReference(Register reg) {
Roland Levillain4d027112015-07-01 15:41:14 +0100156 if (kPoisonHeapReferences) {
157 UnpoisonHeapReference(reg);
158 }
159}
160
161#undef ___
162
Serban Constantinescued8dd492014-02-11 14:15:10 +0000163} // namespace arm64
164} // namespace art