/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm64.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

namespace art {
namespace arm64 {

#ifdef ___
#error "ARM64 Assembler macro already defined."
#else
#define ___ vixl_masm_.
#endif
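
// The `___` macro above simply forwards to the VIXL macro-assembler member, so
// an emission such as
//
//   ___ Add(reg_x(rd), reg_x(rn), value);
//
// is equivalent to vixl_masm_.Add(...), letting VIXL choose the concrete
// instruction sequence (it may, for example, materialize a large immediate
// through a scratch register).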

void Arm64Assembler::FinalizeCode() {
  for (const std::unique_ptr<Arm64Exception>& exception : exception_blocks_) {
    EmitExceptionPoll(exception.get());
  }
  ___ FinalizeCode();
}

size_t Arm64Assembler::CodeSize() const {
  return vixl_masm_.GetBufferCapacity() - vixl_masm_.GetRemainingBufferSpace();
}

const uint8_t* Arm64Assembler::CodeBufferBaseAddress() const {
  return vixl_masm_.GetStartAddress<uint8_t*>();
}

void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  // Copy the instructions from the buffer.
  MemoryRegion from(vixl_masm_.GetStartAddress<void*>(), CodeSize());
  region.CopyFrom(0, from);
}

void Arm64Assembler::GetCurrentThread(ManagedRegister tr) {
  ___ Mov(reg_x(tr.AsArm64().AsXRegister()), reg_x(TR));
}

void Arm64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister /* scratch */) {
  StoreToOffset(TR, SP, offset.Int32Value());
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, -adjust);
  cfi().AdjustCFAOffset(adjust);
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, adjust);
  cfi().AdjustCFAOffset(-adjust);
}

void Arm64Assembler::AddConstant(XRegister rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}

void Arm64Assembler::AddConstant(XRegister rd, XRegister rn, int32_t value,
                                 Condition cond) {
  if ((cond == al) || (cond == nv)) {
    // VIXL macro-assembler handles all variants.
    ___ Add(reg_x(rd), reg_x(rn), value);
  } else {
    // temp = rn + value
    // rd = cond ? temp : rd
    UseScratchRegisterScope temps(&vixl_masm_);
    temps.Exclude(reg_x(rd), reg_x(rn));
    Register temp = temps.AcquireX();
    ___ Add(temp, reg_x(rn), value);
    ___ Csel(reg_x(rd), temp, reg_x(rd), cond);
  }
}
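
// For the conditional case of AddConstant() above, the emitted sequence is
// roughly (a sketch; VIXL may split a large immediate across several
// instructions):
//
//   add  temp, rn, #value
//   csel rd, temp, rd, <cond>
//
// i.e. rd is only updated when <cond> holds and keeps its old value otherwise.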

void Arm64Assembler::StoreWToOffset(StoreOperandType type, WRegister source,
                                    XRegister base, int32_t offset) {
  switch (type) {
    case kStoreByte:
      ___ Strb(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreHalfword:
      ___ Strh(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreWord:
      ___ Str(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

void Arm64Assembler::StoreToOffset(XRegister source, XRegister base, int32_t offset) {
  CHECK_NE(source, SP);
  ___ Str(reg_x(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreSToOffset(SRegister source, XRegister base, int32_t offset) {
  ___ Str(reg_s(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreDToOffset(DRegister source, XRegister base, int32_t offset) {
  ___ Str(reg_d(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Store(FrameOffset offs, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister src = m_src.AsArm64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsWRegister()) {
    CHECK_EQ(4u, size);
    StoreWToOffset(kStoreWord, src.AsWRegister(), SP, offs.Int32Value());
  } else if (src.IsXRegister()) {
    CHECK_EQ(8u, size);
    StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, offs.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    StoreDToOffset(src.AsDRegister(), SP, offs.Int32Value());
  }
}

void Arm64Assembler::StoreRef(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsXRegister()) << src;
  StoreWToOffset(kStoreWord, src.AsOverlappingWRegister(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreRawPtr(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsXRegister()) << src;
  StoreToOffset(src.AsXRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToFrame(FrameOffset offs, uint32_t imm,
                                           ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadImmediate(scratch.AsXRegister(), imm);
  StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToThread64(ThreadOffset64 offs,
                                              uint32_t imm,
                                              ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadImmediate(scratch.AsXRegister(), imm);
  StoreToOffset(scratch.AsXRegister(), TR, offs.Int32Value());
}

void Arm64Assembler::StoreStackOffsetToThread64(ThreadOffset64 tr_offs,
                                                FrameOffset fr_offs,
                                                ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  AddConstant(scratch.AsXRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
}

void Arm64Assembler::StoreStackPointerToThread64(ThreadOffset64 tr_offs) {
  UseScratchRegisterScope temps(&vixl_masm_);
  Register temp = temps.AcquireX();
  ___ Mov(temp, reg_x(SP));
  ___ Str(temp, MEM_OP(reg_x(TR), tr_offs.Int32Value()));
}

void Arm64Assembler::StoreSpanning(FrameOffset dest_off, ManagedRegister m_source,
                                   FrameOffset in_off, ManagedRegister m_scratch) {
  Arm64ManagedRegister source = m_source.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  StoreToOffset(source.AsXRegister(), SP, dest_off.Int32Value());
  LoadFromOffset(scratch.AsXRegister(), SP, in_off.Int32Value());
  StoreToOffset(scratch.AsXRegister(), SP, dest_off.Int32Value() + 8);
}

// Load routines.
void Arm64Assembler::LoadImmediate(XRegister dest, int32_t value,
                                   Condition cond) {
  if ((cond == al) || (cond == nv)) {
    ___ Mov(reg_x(dest), value);
  } else {
    // temp = value
    // dest = cond ? temp : dest
    if (value != 0) {
      UseScratchRegisterScope temps(&vixl_masm_);
      temps.Exclude(reg_x(dest));
      Register temp = temps.AcquireX();
      ___ Mov(temp, value);
      ___ Csel(reg_x(dest), temp, reg_x(dest), cond);
    } else {
      ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond);
    }
  }
}
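
// Similarly, the conditional case of LoadImmediate() boils down to roughly:
//
//   mov  temp, #value
//   csel dest, temp, dest, <cond>
//
// with XZR used instead of a scratch register when value == 0.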

void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest,
                                     XRegister base, int32_t offset) {
  switch (type) {
    case kLoadSignedByte:
      ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadSignedHalfword:
      ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedByte:
      ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedHalfword:
      ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadWord:
      ___ Ldr(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

// Note: We can extend this member by adding load type info - see
// sign extended A64 load variants.
void Arm64Assembler::LoadFromOffset(XRegister dest, XRegister base,
                                    int32_t offset) {
  CHECK_NE(dest, SP);
  ___ Ldr(reg_x(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadSFromOffset(SRegister dest, XRegister base,
                                     int32_t offset) {
  ___ Ldr(reg_s(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadDFromOffset(DRegister dest, XRegister base,
                                     int32_t offset) {
  ___ Ldr(reg_d(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Load(Arm64ManagedRegister dest, XRegister base,
                          int32_t offset, size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsWRegister()) {
    CHECK_EQ(4u, size) << dest;
    ___ Ldr(reg_w(dest.AsWRegister()), MEM_OP(reg_x(base), offset));
  } else if (dest.IsXRegister()) {
    CHECK_NE(dest.AsXRegister(), SP) << dest;
    if (size == 4u) {
      ___ Ldr(reg_w(dest.AsOverlappingWRegister()), MEM_OP(reg_x(base), offset));
    } else {
      CHECK_EQ(8u, size) << dest;
      ___ Ldr(reg_x(dest.AsXRegister()), MEM_OP(reg_x(base), offset));
    }
  } else if (dest.IsSRegister()) {
    ___ Ldr(reg_s(dest.AsSRegister()), MEM_OP(reg_x(base), offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Ldr(reg_d(dest.AsDRegister()), MEM_OP(reg_x(base), offset));
  }
}
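
// Example (illustrative register numbers): loading a 32-bit value into managed
// register X1 with size == 4u emits `ldr w1, [base, #offset]`, which also
// zero-extends the upper half of x1; only size == 8u emits the 64-bit
// `ldr x1, [base, #offset]`.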

void Arm64Assembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm64(), SP, src.Int32Value(), size);
}

void Arm64Assembler::LoadFromThread64(ManagedRegister m_dst, ThreadOffset64 src, size_t size) {
  return Load(m_dst.AsArm64(), TR, src.Int32Value(), size);
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, FrameOffset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsXRegister()) << dst;
  LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, ManagedRegister m_base, MemberOffset offs,
                             bool unpoison_reference) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsXRegister() && base.IsXRegister());
  LoadWFromOffset(kLoadWord, dst.AsOverlappingWRegister(), base.AsXRegister(),
                  offs.Int32Value());
  if (unpoison_reference) {
    WRegister ref_reg = dst.AsOverlappingWRegister();
    MaybeUnpoisonHeapReference(reg_w(ref_reg));
  }
}

void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsXRegister() && base.IsXRegister());
  // Remove dst and base from the temp list - higher level API uses IP1, IP0.
  UseScratchRegisterScope temps(&vixl_masm_);
  temps.Exclude(reg_x(dst.AsXRegister()), reg_x(base.AsXRegister()));
  ___ Ldr(reg_x(dst.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
}

void Arm64Assembler::LoadRawPtrFromThread64(ManagedRegister m_dst, ThreadOffset64 offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsXRegister()) << dst;
  LoadFromOffset(dst.AsXRegister(), TR, offs.Int32Value());
}

// Copying routines.
void Arm64Assembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  if (!dst.Equals(src)) {
    if (dst.IsXRegister()) {
      if (size == 4) {
        CHECK(src.IsWRegister());
        ___ Mov(reg_w(dst.AsOverlappingWRegister()), reg_w(src.AsWRegister()));
      } else {
        if (src.IsXRegister()) {
          ___ Mov(reg_x(dst.AsXRegister()), reg_x(src.AsXRegister()));
        } else {
          ___ Mov(reg_x(dst.AsXRegister()), reg_x(src.AsOverlappingXRegister()));
        }
      }
    } else if (dst.IsWRegister()) {
      CHECK(src.IsWRegister()) << src;
      ___ Mov(reg_w(dst.AsWRegister()), reg_w(src.AsWRegister()));
    } else if (dst.IsSRegister()) {
      CHECK(src.IsSRegister()) << src;
      ___ Fmov(reg_s(dst.AsSRegister()), reg_s(src.AsSRegister()));
    } else {
      CHECK(dst.IsDRegister()) << dst;
      CHECK(src.IsDRegister()) << src;
      ___ Fmov(reg_d(dst.AsDRegister()), reg_d(src.AsDRegister()));
    }
  }
}

void Arm64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                            ThreadOffset64 tr_offs,
                                            ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), SP, fr_offs.Int32Value());
}

void Arm64Assembler::CopyRawPtrToThread64(ThreadOffset64 tr_offs,
                                          FrameOffset fr_offs,
                                          ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value());
}

void Arm64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                             ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(),
                  SP, src.Int32Value());
  StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(),
                 SP, dest.Int32Value());
}

void Arm64Assembler::Copy(FrameOffset dest, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = src_base.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsXRegister(),
                    src_offset.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(ManagedRegister m_dest_base, Offset dest_offs, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = m_dest_base.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), base.AsXRegister(),
                   dest_offs.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsXRegister(), base.AsXRegister(), dest_offs.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                          ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::Copy(ManagedRegister m_dest, Offset dest_offset,
                          ManagedRegister m_src, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  Arm64ManagedRegister dest = m_dest.AsArm64();
  CHECK(dest.IsXRegister()) << dest;
  CHECK(src.IsXRegister()) << src;
  CHECK(scratch.IsXRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    if (scratch.IsWRegister()) {
      LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    } else {
      LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), src.AsXRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), dest.AsXRegister(),
                     dest_offset.Int32Value());
    }
  } else if (size == 8) {
    LoadFromOffset(scratch.AsXRegister(), src.AsXRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsXRegister(), dest.AsXRegister(), dest_offset.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, Offset /*dest_offset*/,
                          FrameOffset /*src*/, Offset /*src_offset*/,
                          ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::MemoryBarrier(ManagedRegister m_scratch ATTRIBUTE_UNUSED) {
  // TODO: Should we check that m_scratch is IP? - see arm.
  ___ Dmb(InnerShareable, BarrierAll);
}

void Arm64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ Sxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ Sxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ Uxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ Uxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::Call(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  LoadFromOffset(scratch.AsXRegister(), base.AsXRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  // Remove base and scratch from the temp list - higher level API uses IP1, IP0.
  UseScratchRegisterScope temps(&vixl_masm_);
  temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister()));
  ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
  ___ Br(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::Call(FrameOffset base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  // Call *(*(SP + base) + offset)
  LoadFromOffset(scratch.AsXRegister(), SP, base.Int32Value());
  LoadFromOffset(scratch.AsXRegister(), scratch.AsXRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsXRegister()));
}

void Arm64Assembler::CallFromThread64(ThreadOffset64 offset ATTRIBUTE_UNUSED,
                                      ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Call() variant";
}

void Arm64Assembler::CreateHandleScopeEntry(
    ManagedRegister m_out_reg, FrameOffset handle_scope_offs, ManagedRegister m_in_reg,
    bool null_allowed) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  // For now we only hold stale handle scope entries in x registers.
  CHECK(in_reg.IsNoRegister() || in_reg.IsXRegister()) << in_reg;
  CHECK(out_reg.IsXRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      LoadWFromOffset(kLoadWord, out_reg.AsOverlappingWRegister(), SP,
                      handle_scope_offs.Int32Value());
      in_reg = out_reg;
    }
    ___ Cmp(reg_w(in_reg.AsOverlappingWRegister()), 0);
    if (!out_reg.Equals(in_reg)) {
      LoadImmediate(out_reg.AsXRegister(), 0, eq);
    }
    AddConstant(out_reg.AsXRegister(), SP, handle_scope_offs.Int32Value(), ne);
  } else {
    AddConstant(out_reg.AsXRegister(), SP, handle_scope_offs.Int32Value(), al);
  }
}
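
// With null_allowed, the sequence built above is roughly:
//
//   cmp  w<in>, #0
//   csel x<out>, xzr, x<out>, eq     ; only when out_reg != in_reg
//   add  temp, sp, #handle_scope_offs
//   csel x<out>, temp, x<out>, ne
//
// so out_reg ends up as 0 for a null reference and as the address of the
// handle scope slot otherwise.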

void Arm64Assembler::CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handle_scope_offset,
                                            ManagedRegister m_scratch, bool null_allowed) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsXRegister()) << scratch;
  if (null_allowed) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingWRegister(), SP,
                    handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0.  Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(reg_w(scratch.AsOverlappingWRegister()), 0);
    // TODO: Move this logic into AddConstant() with flags.
    AddConstant(scratch.AsXRegister(), SP, handle_scope_offset.Int32Value(), ne);
  } else {
    AddConstant(scratch.AsXRegister(), SP, handle_scope_offset.Int32Value(), al);
  }
  StoreToOffset(scratch.AsXRegister(), SP, out_off.Int32Value());
}

void Arm64Assembler::LoadReferenceFromHandleScope(ManagedRegister m_out_reg,
                                                  ManagedRegister m_in_reg) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  CHECK(out_reg.IsXRegister()) << out_reg;
  CHECK(in_reg.IsXRegister()) << in_reg;
  vixl::aarch64::Label exit;
  if (!out_reg.Equals(in_reg)) {
    // FIXME: Who sets the flags here?
    LoadImmediate(out_reg.AsXRegister(), 0, eq);
  }
  ___ Cbz(reg_x(in_reg.AsXRegister()), &exit);
  LoadFromOffset(out_reg.AsXRegister(), in_reg.AsXRegister(), 0);
  ___ Bind(&exit);
}
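
// LoadReferenceFromHandleScope() above emits roughly:
//
//   csel x<out>, xzr, x<out>, eq     ; only when out_reg != in_reg, see FIXME
//   cbz  x<in>, exit
//   ldr  x<out>, [x<in>, #0]         ; dereference the handle scope entry
// exit: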

void Arm64Assembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  exception_blocks_.emplace_back(new Arm64Exception(scratch, stack_adjust));
  LoadFromOffset(scratch.AsXRegister(),
                 TR,
                 Thread::ExceptionOffset<kArm64PointerSize>().Int32Value());
  ___ Cbnz(reg_x(scratch.AsXRegister()), exception_blocks_.back()->Entry());
}

void Arm64Assembler::EmitExceptionPoll(Arm64Exception* exception) {
  UseScratchRegisterScope temps(&vixl_masm_);
  temps.Exclude(reg_x(exception->scratch_.AsXRegister()));
  Register temp = temps.AcquireX();

  // Bind exception poll entry.
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving X0 as this won't return.
  ___ Mov(reg_x(X0), reg_x(exception->scratch_.AsXRegister()));
  ___ Ldr(temp,
          MEM_OP(reg_x(TR),
                 QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, pDeliverException).Int32Value()));

  ___ Blr(temp);
  // Call should never return.
  ___ Brk();
}
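
// Taken together, ExceptionPoll() and EmitExceptionPoll() produce roughly:
//
//   ldr  x<scratch>, [tr, #exception_offset]   ; inline poll
//   cbnz x<scratch>, <poll entry>
//   ...
// <poll entry>:                                ; emitted during FinalizeCode()
//   (DecreaseFrameSize if stack_adjust_ != 0)
//   mov  x0, x<scratch>
//   ldr  temp, [tr, #pDeliverException_offset]
//   blr  temp
//   brk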

static inline dwarf::Reg DWARFReg(CPURegister reg) {
  if (reg.IsFPRegister()) {
    return dwarf::Reg::Arm64Fp(reg.GetCode());
  } else {
    DCHECK_LT(reg.GetCode(), 31u);  // X0 - X30.
    return dwarf::Reg::Arm64Core(reg.GetCode());
  }
}

void Arm64Assembler::SpillRegisters(CPURegList registers, int offset) {
  int size = registers.GetRegisterSizeInBytes();
  const Register sp = vixl_masm_.StackPointer();
  // Since we are operating on register pairs, we would like to align on
  // double the standard size; on the other hand, we don't want to insert
  // an extra store, which will happen if the number of registers is even.
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Stp(dst0, dst1, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    cfi_.RelOffset(DWARFReg(dst1), offset + size);
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
  }
  DCHECK(registers.IsEmpty());
}
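
// For example, spilling {x19, x20, x21} (illustrative registers) at an offset
// that is 8-byte but not 16-byte aligned first emits a single
// `str x19, [sp, #offset]` to restore 2 * size alignment, then
// `stp x20, x21, [sp, #offset + 8]`; with an even register count the loop
// above pairs everything with stp directly.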

void Arm64Assembler::UnspillRegisters(CPURegList registers, int offset) {
  int size = registers.GetRegisterSizeInBytes();
  const Register sp = vixl_masm_.StackPointer();
  // Be consistent with the logic for spilling registers.
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Ldp(dst0, dst1, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    cfi_.Restore(DWARFReg(dst1));
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
  }
  DCHECK(registers.IsEmpty());
}

void Arm64Assembler::BuildFrame(size_t frame_size,
                                ManagedRegister method_reg,
                                ArrayRef<const ManagedRegister> callee_save_regs,
                                const ManagedRegisterEntrySpills& entry_spills) {
  // Setup VIXL CPURegList for callee-saves.
  CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0);
  CPURegList fp_reg_list(CPURegister::kFPRegister, kDRegSize, 0);
  for (auto r : callee_save_regs) {
    Arm64ManagedRegister reg = r.AsArm64();
    if (reg.IsXRegister()) {
      core_reg_list.Combine(reg_x(reg.AsXRegister()).GetCode());
    } else {
      DCHECK(reg.IsDRegister());
      fp_reg_list.Combine(reg_d(reg.AsDRegister()).GetCode());
    }
  }
  size_t core_reg_size = core_reg_list.GetTotalSizeInBytes();
  size_t fp_reg_size = fp_reg_list.GetTotalSizeInBytes();

  // Increase frame to required size.
  DCHECK_ALIGNED(frame_size, kStackAlignment);
  DCHECK_GE(frame_size, core_reg_size + fp_reg_size + static_cast<size_t>(kArm64PointerSize));
  IncreaseFrameSize(frame_size);

  // Save callee-saves.
  SpillRegisters(core_reg_list, frame_size - core_reg_size);
  SpillRegisters(fp_reg_list, frame_size - core_reg_size - fp_reg_size);

  DCHECK(core_reg_list.IncludesAliasOf(reg_x(TR)));

  // Write ArtMethod*.
  DCHECK(X0 == method_reg.AsArm64().AsXRegister());
  StoreToOffset(X0, SP, 0);

  // Write out entry spills.
  int32_t offset = frame_size + static_cast<size_t>(kArm64PointerSize);
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    Arm64ManagedRegister reg = entry_spills.at(i).AsArm64();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsXRegister()) {
      StoreToOffset(reg.AsXRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsWRegister()) {
      StoreWToOffset(kStoreWord, reg.AsWRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsSRegister()) {
      StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    }
  }
}
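
// A sketch of the resulting frame layout, based purely on the offsets used
// above (not an authoritative ABI description):
//
//   [SP + frame_size + 8, ...)                      incoming stack arguments
//                                                   (entry spills written back)
//   [SP + frame_size - core_reg_size, frame_size)   callee-saved core registers
//   [SP + frame_size - core_reg_size - fp_reg_size,
//        frame_size - core_reg_size)                callee-saved FP registers
//   [SP + 0]                                        ArtMethod*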

void Arm64Assembler::RemoveFrame(size_t frame_size,
                                 ArrayRef<const ManagedRegister> callee_save_regs) {
  // Setup VIXL CPURegList for callee-saves.
  CPURegList core_reg_list(CPURegister::kRegister, kXRegSize, 0);
  CPURegList fp_reg_list(CPURegister::kFPRegister, kDRegSize, 0);
  for (auto r : callee_save_regs) {
    Arm64ManagedRegister reg = r.AsArm64();
    if (reg.IsXRegister()) {
      core_reg_list.Combine(reg_x(reg.AsXRegister()).GetCode());
    } else {
      DCHECK(reg.IsDRegister());
      fp_reg_list.Combine(reg_d(reg.AsDRegister()).GetCode());
    }
  }
  size_t core_reg_size = core_reg_list.GetTotalSizeInBytes();
  size_t fp_reg_size = fp_reg_list.GetTotalSizeInBytes();

  // For now we only check that the size of the frame is large enough to hold spills and method
  // reference.
  DCHECK_GE(frame_size, core_reg_size + fp_reg_size + static_cast<size_t>(kArm64PointerSize));
  DCHECK_ALIGNED(frame_size, kStackAlignment);

  DCHECK(core_reg_list.IncludesAliasOf(reg_x(TR)));

  cfi_.RememberState();

  // Restore callee-saves.
  UnspillRegisters(core_reg_list, frame_size - core_reg_size);
  UnspillRegisters(fp_reg_list, frame_size - core_reg_size - fp_reg_size);

  // Decrease frame size to start of callee saved regs.
  DecreaseFrameSize(frame_size);

  // Pop callee saved and return to LR.
  ___ Ret();

  // The CFI should be restored for any code that follows the exit block.
  cfi_.RestoreState();
  cfi_.DefCFAOffset(frame_size);
}

void Arm64Assembler::PoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}

void Arm64Assembler::UnpoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}

void Arm64Assembler::MaybeUnpoisonHeapReference(Register reg) {
  if (kPoisonHeapReferences) {
    UnpoisonHeapReference(reg);
  }
}
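
// Heap reference poisoning stores references in the heap in negated form, so
// poisoning and unpoisoning are the same `neg` on the W register and
// MaybeUnpoisonHeapReference() is a no-op when kPoisonHeapReferences is false.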

#undef ___

}  // namespace arm64
}  // namespace art