/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm64.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"
#include "utils.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {
namespace arm64 {

#ifdef ___
#error "ARM64 Assembler macro already defined."
#else
#define ___ vixl_masm_->
#endif

void Arm64Assembler::EmitSlowPaths() {
  if (!exception_blocks_.empty()) {
    for (size_t i = 0; i < exception_blocks_.size(); i++) {
      EmitExceptionPoll(exception_blocks_.at(i));
    }
  }
  ___ FinalizeCode();
}

size_t Arm64Assembler::CodeSize() const {
  return ___ SizeOfCodeGenerated();
}

void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  // Copy the instructions from the buffer.
  MemoryRegion from(reinterpret_cast<void*>(vixl_buf_), CodeSize());
  region.CopyFrom(0, from);
}

void Arm64Assembler::GetCurrentThread(ManagedRegister tr) {
  ___ Mov(reg_x(tr.AsArm64().AsCoreRegister()), reg_x(ETR));
}

void Arm64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister /* scratch */) {
  StoreToOffset(ETR, SP, offset.Int32Value());
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, -adjust);
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, adjust);
}

void Arm64Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}

void Arm64Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                 Condition cond) {
  if ((cond == al) || (cond == nv)) {
    // VIXL macro-assembler handles all variants.
    ___ Add(reg_x(rd), reg_x(rn), value);
  } else {
    // temp = rn + value
    // rd = cond ? temp : rd
    vixl::UseScratchRegisterScope temps(vixl_masm_);
    temps.Exclude(reg_x(rd), reg_x(rn));
    vixl::Register temp = temps.AcquireX();
    ___ Add(temp, reg_x(rn), value);
    ___ Csel(reg_x(rd), temp, reg_x(rd), cond);
  }
}

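// Store routines.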
void Arm64Assembler::StoreWToOffset(StoreOperandType type, WRegister source,
                                    Register base, int32_t offset) {
  switch (type) {
    case kStoreByte:
      ___ Strb(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreHalfword:
      ___ Strh(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreWord:
      ___ Str(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

void Arm64Assembler::StoreToOffset(Register source, Register base, int32_t offset) {
  CHECK_NE(source, SP);
  ___ Str(reg_x(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreSToOffset(SRegister source, Register base, int32_t offset) {
  ___ Str(reg_s(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreDToOffset(DRegister source, Register base, int32_t offset) {
  ___ Str(reg_d(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Store(FrameOffset offs, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister src = m_src.AsArm64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsWRegister()) {
    CHECK_EQ(4u, size);
    StoreWToOffset(kStoreWord, src.AsWRegister(), SP, offs.Int32Value());
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(8u, size);
    StoreToOffset(src.AsCoreRegister(), SP, offs.Int32Value());
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, offs.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    StoreDToOffset(src.AsDRegister(), SP, offs.Int32Value());
  }
}

void Arm64Assembler::StoreRef(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsCoreRegister()) << src;
  StoreWToOffset(kStoreWord, src.AsOverlappingCoreRegisterLow(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreRawPtr(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsCoreRegister()) << src;
  StoreToOffset(src.AsCoreRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToFrame(FrameOffset offs, uint32_t imm,
                                           ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToThread64(ThreadOffset<8> offs, uint32_t imm,
                                              ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreToOffset(scratch.AsCoreRegister(), ETR, offs.Int32Value());
}

void Arm64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> tr_offs,
                                                FrameOffset fr_offs,
                                                ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
}

void Arm64Assembler::StoreStackPointerToThread64(ThreadOffset<8> tr_offs) {
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  vixl::Register temp = temps.AcquireX();
  ___ Mov(temp, reg_x(SP));
  ___ Str(temp, MEM_OP(reg_x(ETR), tr_offs.Int32Value()));
}

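// Store |m_source| at dest_off and copy the stack slot at in_off into the
// adjacent slot at dest_off + 8.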
void Arm64Assembler::StoreSpanning(FrameOffset dest_off, ManagedRegister m_source,
                                   FrameOffset in_off, ManagedRegister m_scratch) {
  Arm64ManagedRegister source = m_source.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  StoreToOffset(source.AsCoreRegister(), SP, dest_off.Int32Value());
  LoadFromOffset(scratch.AsCoreRegister(), SP, in_off.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), SP, dest_off.Int32Value() + 8);
}

// Load routines.
void Arm64Assembler::LoadImmediate(Register dest, int32_t value,
                                   Condition cond) {
  if ((cond == al) || (cond == nv)) {
    ___ Mov(reg_x(dest), value);
  } else {
    // temp = value
    // rd = cond ? temp : rd
    if (value != 0) {
      vixl::UseScratchRegisterScope temps(vixl_masm_);
      temps.Exclude(reg_x(dest));
      vixl::Register temp = temps.AcquireX();
      ___ Mov(temp, value);
      ___ Csel(reg_x(dest), temp, reg_x(dest), cond);
    } else {
      ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), cond);
    }
  }
}

void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest,
                                     Register base, int32_t offset) {
  switch (type) {
    case kLoadSignedByte:
      ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadSignedHalfword:
      ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedByte:
      ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedHalfword:
      ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadWord:
      ___ Ldr(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

// Note: We can extend this member by adding load type info - see
// sign extended A64 load variants.
void Arm64Assembler::LoadFromOffset(Register dest, Register base,
                                    int32_t offset) {
  CHECK_NE(dest, SP);
  ___ Ldr(reg_x(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadSFromOffset(SRegister dest, Register base,
                                     int32_t offset) {
  ___ Ldr(reg_s(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadDFromOffset(DRegister dest, Register base,
                                     int32_t offset) {
  ___ Ldr(reg_d(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Load(Arm64ManagedRegister dest, Register base,
                          int32_t offset, size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsWRegister()) {
    CHECK_EQ(4u, size) << dest;
    ___ Ldr(reg_w(dest.AsWRegister()), MEM_OP(reg_x(base), offset));
  } else if (dest.IsCoreRegister()) {
    CHECK_NE(dest.AsCoreRegister(), SP) << dest;
    if (size == 4u) {
      ___ Ldr(reg_w(dest.AsOverlappingCoreRegisterLow()), MEM_OP(reg_x(base), offset));
    } else {
      CHECK_EQ(8u, size) << dest;
      ___ Ldr(reg_x(dest.AsCoreRegister()), MEM_OP(reg_x(base), offset));
    }
  } else if (dest.IsSRegister()) {
    ___ Ldr(reg_s(dest.AsSRegister()), MEM_OP(reg_x(base), offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Ldr(reg_d(dest.AsDRegister()), MEM_OP(reg_x(base), offset));
  }
}

void Arm64Assembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm64(), SP, src.Int32Value(), size);
}

void Arm64Assembler::LoadFromThread64(ManagedRegister m_dst, ThreadOffset<8> src, size_t size) {
  return Load(m_dst.AsArm64(), ETR, src.Int32Value(), size);
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, FrameOffset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadWFromOffset(kLoadWord, dst.AsOverlappingCoreRegisterLow(), SP, offs.Int32Value());
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, ManagedRegister m_base,
                             MemberOffset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsCoreRegister() && base.IsCoreRegister());
  LoadWFromOffset(kLoadWord, dst.AsOverlappingCoreRegisterLow(), base.AsCoreRegister(),
                  offs.Int32Value());
}

void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsCoreRegister() && base.IsCoreRegister());
  // Remove dst and base from the temp list - higher level API uses IP1, IP0.
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(dst.AsCoreRegister()), reg_x(base.AsCoreRegister()));
  ___ Ldr(reg_x(dst.AsCoreRegister()), MEM_OP(reg_x(base.AsCoreRegister()), offs.Int32Value()));
}

void Arm64Assembler::LoadRawPtrFromThread64(ManagedRegister m_dst, ThreadOffset<8> offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(dst.AsCoreRegister(), ETR, offs.Int32Value());
}

// Copying routines.
void Arm64Assembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      if (size == 4) {
        CHECK(src.IsWRegister());
        ___ Mov(reg_x(dst.AsCoreRegister()), reg_w(src.AsWRegister()));
      } else {
        if (src.IsCoreRegister()) {
          ___ Mov(reg_x(dst.AsCoreRegister()), reg_x(src.AsCoreRegister()));
        } else {
          ___ Mov(reg_x(dst.AsCoreRegister()), reg_w(src.AsWRegister()));
        }
      }
    } else if (dst.IsWRegister()) {
      CHECK(src.IsWRegister()) << src;
      ___ Mov(reg_w(dst.AsWRegister()), reg_w(src.AsWRegister()));
    } else if (dst.IsSRegister()) {
      CHECK(src.IsSRegister()) << src;
      ___ Fmov(reg_s(dst.AsSRegister()), reg_s(src.AsSRegister()));
    } else {
      CHECK(dst.IsDRegister()) << dst;
      CHECK(src.IsDRegister()) << src;
      ___ Fmov(reg_d(dst.AsDRegister()), reg_d(src.AsDRegister()));
    }
  }
}

void Arm64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                            ThreadOffset<8> tr_offs,
                                            ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
}

void Arm64Assembler::CopyRawPtrToThread64(ThreadOffset<8> tr_offs,
                                          FrameOffset fr_offs,
                                          ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
}

void Arm64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                             ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(),
                  SP, src.Int32Value());
  StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(),
                 SP, dest.Int32Value());
}

void Arm64Assembler::Copy(FrameOffset dest, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = src_base.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsCoreRegister(),
                    src_offset.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(ManagedRegister m_dest_base, Offset dest_offs, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = m_dest_base.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), base.AsCoreRegister(),
                   dest_offs.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), dest_offs.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                          ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::Copy(ManagedRegister m_dest, Offset dest_offset,
                          ManagedRegister m_src, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  Arm64ManagedRegister dest = m_dest.AsArm64();
  CHECK(dest.IsCoreRegister()) << dest;
  CHECK(src.IsCoreRegister()) << src;
  CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    if (scratch.IsWRegister()) {
      LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsCoreRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsCoreRegister(),
                     dest_offset.Int32Value());
    } else {
      LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), src.AsCoreRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), dest.AsCoreRegister(),
                     dest_offset.Int32Value());
    }
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), src.AsCoreRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), dest.AsCoreRegister(), dest_offset.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, Offset /*dest_offset*/,
                          FrameOffset /*src*/, Offset /*src_offset*/,
                          ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

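// Emit a full data memory barrier; on non-SMP builds this expands to nothing.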
void Arm64Assembler::MemoryBarrier(ManagedRegister m_scratch) {
  // TODO: Should we check that m_scratch is IP? - see arm.
#if ANDROID_SMP != 0
  ___ Dmb(vixl::InnerShareable, vixl::BarrierAll);
#endif
}

void Arm64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ sxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ sxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ uxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ uxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

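// Call routines.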
void Arm64Assembler::Call(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsCoreRegister()));
}

void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  // Remove base and scratch from the temp list - higher level API uses IP1, IP0.
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(base.AsCoreRegister()), reg_x(scratch.AsCoreRegister()));
  ___ Ldr(reg_x(scratch.AsCoreRegister()), MEM_OP(reg_x(base.AsCoreRegister()), offs.Int32Value()));
  ___ Br(reg_x(scratch.AsCoreRegister()));
}

void Arm64Assembler::Call(FrameOffset base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  // Call *(*(SP + base) + offset)
  LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP, base.Int32Value());
  LoadFromOffset(scratch.AsCoreRegister(), scratch.AsCoreRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsCoreRegister()));
}

void Arm64Assembler::CallFromThread64(ThreadOffset<8> /*offset*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Call() variant";
}

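// Create a handle scope entry for the reference in |m_in_reg| (or spilled at
// |handle_scope_offs| when no register is supplied) and write the entry's
// address (or null, when permitted and the reference is null) into |m_out_reg|.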
void Arm64Assembler::CreateHandleScopeEntry(ManagedRegister m_out_reg, FrameOffset handle_scope_offs,
                                            ManagedRegister m_in_reg, bool null_allowed) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  // For now we only hold stale handle scope entries in x registers.
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      LoadWFromOffset(kLoadWord, out_reg.AsOverlappingCoreRegisterLow(), SP,
                      handle_scope_offs.Int32Value());
      in_reg = out_reg;
    }
    ___ Cmp(reg_w(in_reg.AsOverlappingCoreRegisterLow()), 0);
    if (!out_reg.Equals(in_reg)) {
      LoadImmediate(out_reg.AsCoreRegister(), 0, eq);
    }
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offs.Int32Value(), ne);
  } else {
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offs.Int32Value(), al);
  }
}

void Arm64Assembler::CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handle_scope_offset,
                                            ManagedRegister m_scratch, bool null_allowed) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  if (null_allowed) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP,
                    handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    ___ Cmp(reg_w(scratch.AsOverlappingCoreRegisterLow()), 0);
    // TODO: Move this logic into AddConstant() with flags.
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), ne);
  } else {
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), al);
  }
  StoreToOffset(scratch.AsCoreRegister(), SP, out_off.Int32Value());
}

void Arm64Assembler::LoadReferenceFromHandleScope(ManagedRegister m_out_reg,
                                                  ManagedRegister m_in_reg) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  CHECK(in_reg.IsCoreRegister()) << in_reg;
  vixl::Label exit;
  if (!out_reg.Equals(in_reg)) {
    // FIXME: Who sets the flags here?
    LoadImmediate(out_reg.AsCoreRegister(), 0, eq);
  }
  ___ Cbz(reg_x(in_reg.AsCoreRegister()), &exit);
  LoadFromOffset(out_reg.AsCoreRegister(), in_reg.AsCoreRegister(), 0);
  ___ Bind(&exit);
}

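// Record an exception poll: if the thread has a pending exception, branch to a
// slow path that is emitted later by EmitExceptionPoll().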
void Arm64Assembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64Exception *current_exception = new Arm64Exception(scratch, stack_adjust);
  exception_blocks_.push_back(current_exception);
  LoadFromOffset(scratch.AsCoreRegister(), ETR, Thread::ExceptionOffset<8>().Int32Value());
  ___ Cbnz(reg_x(scratch.AsCoreRegister()), current_exception->Entry());
}

void Arm64Assembler::EmitExceptionPoll(Arm64Exception *exception) {
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(exception->scratch_.AsCoreRegister()));
  vixl::Register temp = temps.AcquireX();

  // Bind exception poll entry.
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving X0 as this won't return.
  ___ Mov(reg_x(X0), reg_x(exception->scratch_.AsCoreRegister()));
  ___ Ldr(temp, MEM_OP(reg_x(ETR), QUICK_ENTRYPOINT_OFFSET(8, pDeliverException).Int32Value()));

  // Move ETR(Callee saved) back to TR(Caller saved) reg. We use ETR on calls
  // to external functions that might trash TR. We do not need the original
  // ETR(X21) saved in BuildFrame().
  ___ Mov(reg_x(TR), reg_x(ETR));

  ___ Blr(temp);
  // Call should never return.
  ___ Brk();
}

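// Size of a single spilled core register (and of a native pointer) on ARM64, in bytes.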
constexpr size_t kFramePointerSize = 8;

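// Build the method frame: reserve the stack space, spill the callee-saved core
// registers (including LR), move TR into the callee-saved ETR, store the method
// StackReference at SP, and spill any incoming argument registers.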
void Arm64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                                const std::vector<ManagedRegister>& callee_save_regs,
                                const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(X0 == method_reg.AsArm64().AsCoreRegister());

  // TODO: *create APCS FP - end of FP chain;
  //       *add support for saving a different set of callee regs.
  // For now we check that the size of callee regs vector is 11.
  CHECK_EQ(callee_save_regs.size(), kJniRefSpillRegsSize);
  // Increase frame to required size - must be at least space to push StackReference<Method>.
  CHECK_GT(frame_size, kJniRefSpillRegsSize * kFramePointerSize);
  IncreaseFrameSize(frame_size);

  // TODO: Ugly hard code...
  // Should generate these according to the spill mask automatically.
  // TUNING: Use stp.
  // Note: Must match Arm64JniCallingConvention::CoreSpillMask().
  size_t reg_offset = frame_size;
  reg_offset -= 8;
  StoreToOffset(LR, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X29, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X28, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X27, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X26, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X25, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X24, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X23, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X22, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X21, SP, reg_offset);
  reg_offset -= 8;
  StoreToOffset(X20, SP, reg_offset);

  // Move TR(Caller saved) to ETR(Callee saved). The original (ETR)X21 has been saved on stack.
  // This way we make sure that TR is not trashed by native code.
  ___ Mov(reg_x(ETR), reg_x(TR));

  // Write StackReference<Method>.
  DCHECK_EQ(4U, sizeof(StackReference<mirror::ArtMethod>));
  StoreWToOffset(StoreOperandType::kStoreWord, W0, SP, 0);

  // Write out entry spills.
  int32_t offset = frame_size + sizeof(StackReference<mirror::ArtMethod>);
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    Arm64ManagedRegister reg = entry_spills.at(i).AsArm64();
    if (reg.IsNoRegister()) {
      // Only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      StoreToOffset(reg.AsCoreRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsWRegister()) {
      StoreWToOffset(kStoreWord, reg.AsWRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsSRegister()) {
      StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    }
  }
}

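// Tear down the frame built by BuildFrame(): restore TR from ETR, reload the
// callee-saved core registers, release the stack space and return to LR.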
void Arm64Assembler::RemoveFrame(size_t frame_size, const std::vector<ManagedRegister>& callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);

  // For now we only check that the callee save set has the expected size and that the frame
  // is larger than the spill area.
  CHECK_EQ(callee_save_regs.size(), kJniRefSpillRegsSize);
  CHECK_GT(frame_size, kJniRefSpillRegsSize * kFramePointerSize);

  // We move ETR(aapcs64 callee saved) back to TR(aapcs64 caller saved) which might have
  // been trashed in the native call. The original ETR(X21) is restored from stack.
  ___ Mov(reg_x(TR), reg_x(ETR));

  // TODO: Ugly hard code...
  // Should generate these according to the spill mask automatically.
  // TUNING: Use ldp.
  // Note: Must match Arm64JniCallingConvention::CoreSpillMask().
  size_t reg_offset = frame_size;
  reg_offset -= 8;
  LoadFromOffset(LR, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X29, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X28, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X27, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X26, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X25, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X24, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X23, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X22, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X21, SP, reg_offset);
  reg_offset -= 8;
  LoadFromOffset(X20, SP, reg_offset);

  // Decrease frame size to start of callee saved regs.
  DecreaseFrameSize(frame_size);

  // Pop callee saved and return to LR.
  ___ Ret();
}

}  // namespace arm64
}  // namespace art