/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm64.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"
#include "utils.h"

namespace art {
namespace arm64 {

#ifdef ___
#error "ARM64 Assembler macro already defined."
#else
#define ___ vixl_masm_->
#endif

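// Emit the out-of-line exception slow paths queued up by ExceptionPoll(),
// then let the VIXL macro-assembler finalize the generated code.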
void Arm64Assembler::EmitSlowPaths() {
  if (!exception_blocks_.empty()) {
    for (size_t i = 0; i < exception_blocks_.size(); i++) {
      EmitExceptionPoll(exception_blocks_.at(i));
    }
  }
  ___ FinalizeCode();
}

size_t Arm64Assembler::CodeSize() const {
  return ___ SizeOfCodeGenerated();
}

void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  // Copy the instructions from the buffer.
  MemoryRegion from(reinterpret_cast<void*>(vixl_buf_), CodeSize());
  region.CopyFrom(0, from);
}

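// Note: throughout this file the managed Thread* lives in ETR, a callee-saved
// register; see BuildFrame() and EmitExceptionPoll() for the TR/ETR shuffle.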
void Arm64Assembler::GetCurrentThread(ManagedRegister tr) {
  ___ Mov(reg_x(tr.AsArm64().AsCoreRegister()), reg_x(ETR));
}

void Arm64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister /* scratch */) {
  StoreToOffset(ETR, SP, offset.Int32Value());
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::IncreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, -adjust);
}

// See Arm64 PCS Section 5.2.2.1.
void Arm64Assembler::DecreaseFrameSize(size_t adjust) {
  CHECK_ALIGNED(adjust, kStackAlignment);
  AddConstant(SP, adjust);
}

void Arm64Assembler::AddConstant(Register rd, int32_t value, Condition cond) {
  AddConstant(rd, rd, value, cond);
}

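// Unlike A32, A64 has no conditionally executed arithmetic, so for a real
// condition the sum is computed into a temp and selected into rd with CSEL.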
void Arm64Assembler::AddConstant(Register rd, Register rn, int32_t value,
                                 Condition cond) {
  if ((cond == AL) || (cond == NV)) {
    // VIXL macro-assembler handles all variants.
    ___ Add(reg_x(rd), reg_x(rn), value);
  } else {
    // temp = rn + value
    // rd = cond ? temp : rd
    vixl::UseScratchRegisterScope temps(vixl_masm_);
    temps.Exclude(reg_x(rd), reg_x(rn));
    vixl::Register temp = temps.AcquireX();
    ___ Add(temp, reg_x(rn), value);
    ___ Csel(reg_x(rd), temp, reg_x(rd), COND_OP(cond));
  }
}

void Arm64Assembler::StoreWToOffset(StoreOperandType type, WRegister source,
                                    Register base, int32_t offset) {
  switch (type) {
    case kStoreByte:
      ___ Strb(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreHalfword:
      ___ Strh(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    case kStoreWord:
      ___ Str(reg_w(source), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

void Arm64Assembler::StoreToOffset(Register source, Register base, int32_t offset) {
  CHECK_NE(source, SP);
  ___ Str(reg_x(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreSToOffset(SRegister source, Register base, int32_t offset) {
  ___ Str(reg_s(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::StoreDToOffset(DRegister source, Register base, int32_t offset) {
  ___ Str(reg_d(source), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Store(FrameOffset offs, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister src = m_src.AsArm64();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsWRegister()) {
    CHECK_EQ(4u, size);
    StoreWToOffset(kStoreWord, src.AsWRegister(), SP, offs.Int32Value());
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(8u, size);
    StoreToOffset(src.AsCoreRegister(), SP, offs.Int32Value());
  } else if (src.IsSRegister()) {
    StoreSToOffset(src.AsSRegister(), SP, offs.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    StoreDToOffset(src.AsDRegister(), SP, offs.Int32Value());
  }
}

void Arm64Assembler::StoreRef(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsCoreRegister()) << src;
  StoreWToOffset(kStoreWord, src.AsOverlappingCoreRegisterLow(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreRawPtr(FrameOffset offs, ManagedRegister m_src) {
  Arm64ManagedRegister src = m_src.AsArm64();
  CHECK(src.IsCoreRegister()) << src;
  StoreToOffset(src.AsCoreRegister(), SP, offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToFrame(FrameOffset offs, uint32_t imm,
                                           ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), SP,
                 offs.Int32Value());
}

void Arm64Assembler::StoreImmediateToThread64(ThreadOffset<8> offs, uint32_t imm,
                                              ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadImmediate(scratch.AsCoreRegister(), imm);
  StoreToOffset(scratch.AsCoreRegister(), ETR, offs.Int32Value());
}

void Arm64Assembler::StoreStackOffsetToThread64(ThreadOffset<8> tr_offs,
                                                FrameOffset fr_offs,
                                                ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
}

void Arm64Assembler::StoreStackPointerToThread64(ThreadOffset<8> tr_offs) {
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  vixl::Register temp = temps.AcquireX();
  ___ Mov(temp, reg_x(SP));
  ___ Str(temp, MEM_OP(reg_x(ETR), tr_offs.Int32Value()));
}

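// Store source at dest_off and copy the word at in_off into the adjacent
// slot at dest_off + 8, so the two values occupy consecutive stack slots.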
void Arm64Assembler::StoreSpanning(FrameOffset dest_off, ManagedRegister m_source,
                                   FrameOffset in_off, ManagedRegister m_scratch) {
  Arm64ManagedRegister source = m_source.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  StoreToOffset(source.AsCoreRegister(), SP, dest_off.Int32Value());
  LoadFromOffset(scratch.AsCoreRegister(), SP, in_off.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), SP, dest_off.Int32Value() + 8);
}

// Load routines.
void Arm64Assembler::LoadImmediate(Register dest, int32_t value,
                                   Condition cond) {
  if ((cond == AL) || (cond == NV)) {
    ___ Mov(reg_x(dest), value);
  } else {
    // temp = value
    // dest = cond ? temp : dest
    if (value != 0) {
      vixl::UseScratchRegisterScope temps(vixl_masm_);
      temps.Exclude(reg_x(dest));
      vixl::Register temp = temps.AcquireX();
      ___ Mov(temp, value);
      ___ Csel(reg_x(dest), temp, reg_x(dest), COND_OP(cond));
    } else {
      ___ Csel(reg_x(dest), reg_x(XZR), reg_x(dest), COND_OP(cond));
    }
  }
}

void Arm64Assembler::LoadWFromOffset(LoadOperandType type, WRegister dest,
                                     Register base, int32_t offset) {
  switch (type) {
    case kLoadSignedByte:
      ___ Ldrsb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadSignedHalfword:
      ___ Ldrsh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedByte:
      ___ Ldrb(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadUnsignedHalfword:
      ___ Ldrh(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    case kLoadWord:
      ___ Ldr(reg_w(dest), MEM_OP(reg_x(base), offset));
      break;
    default:
      LOG(FATAL) << "UNREACHABLE";
  }
}

// Note: this method can be extended by adding load type info - see the
// sign-extended A64 load variants.
void Arm64Assembler::LoadFromOffset(Register dest, Register base,
                                    int32_t offset) {
  CHECK_NE(dest, SP);
  ___ Ldr(reg_x(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadSFromOffset(SRegister dest, Register base,
                                     int32_t offset) {
  ___ Ldr(reg_s(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::LoadDFromOffset(DRegister dest, Register base,
                                     int32_t offset) {
  ___ Ldr(reg_d(dest), MEM_OP(reg_x(base), offset));
}

void Arm64Assembler::Load(Arm64ManagedRegister dest, Register base,
                          int32_t offset, size_t size) {
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size) << dest;
  } else if (dest.IsWRegister()) {
    CHECK_EQ(4u, size) << dest;
    ___ Ldr(reg_w(dest.AsWRegister()), MEM_OP(reg_x(base), offset));
  } else if (dest.IsCoreRegister()) {
    CHECK_NE(dest.AsCoreRegister(), SP) << dest;
    if (size == 4u) {
      ___ Ldr(reg_w(dest.AsOverlappingCoreRegisterLow()), MEM_OP(reg_x(base), offset));
    } else {
      CHECK_EQ(8u, size) << dest;
      ___ Ldr(reg_x(dest.AsCoreRegister()), MEM_OP(reg_x(base), offset));
    }
  } else if (dest.IsSRegister()) {
    ___ Ldr(reg_s(dest.AsSRegister()), MEM_OP(reg_x(base), offset));
  } else {
    CHECK(dest.IsDRegister()) << dest;
    ___ Ldr(reg_d(dest.AsDRegister()), MEM_OP(reg_x(base), offset));
  }
}

void Arm64Assembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  return Load(m_dst.AsArm64(), SP, src.Int32Value(), size);
}

void Arm64Assembler::LoadFromThread64(ManagedRegister m_dst, ThreadOffset<8> src, size_t size) {
  return Load(m_dst.AsArm64(), ETR, src.Int32Value(), size);
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, FrameOffset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadWFromOffset(kLoadWord, dst.AsOverlappingCoreRegisterLow(), SP, offs.Int32Value());
}

void Arm64Assembler::LoadRef(ManagedRegister m_dst, ManagedRegister m_base,
                             MemberOffset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsCoreRegister() && base.IsCoreRegister());
  LoadWFromOffset(kLoadWord, dst.AsOverlappingCoreRegisterLow(), base.AsCoreRegister(),
                  offs.Int32Value());
}

void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsCoreRegister() && base.IsCoreRegister());
  // Remove dst and base from the temp list - the higher level API uses IP1, IP0.
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(dst.AsCoreRegister()), reg_x(base.AsCoreRegister()));
  ___ Ldr(reg_x(dst.AsCoreRegister()), MEM_OP(reg_x(base.AsCoreRegister()), offs.Int32Value()));
}

void Arm64Assembler::LoadRawPtrFromThread64(ManagedRegister m_dst, ThreadOffset<8> offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  CHECK(dst.IsCoreRegister()) << dst;
  LoadFromOffset(dst.AsCoreRegister(), ETR, offs.Int32Value());
}

// Copying routines.
void Arm64Assembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t size) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      if (size == 4) {
        CHECK(src.IsWRegister());
        ___ Mov(reg_x(dst.AsCoreRegister()), reg_w(src.AsWRegister()));
      } else {
        if (src.IsCoreRegister()) {
          ___ Mov(reg_x(dst.AsCoreRegister()), reg_x(src.AsCoreRegister()));
        } else {
          ___ Mov(reg_x(dst.AsCoreRegister()), reg_w(src.AsWRegister()));
        }
      }
    } else if (dst.IsWRegister()) {
      CHECK(src.IsWRegister()) << src;
      ___ Mov(reg_w(dst.AsWRegister()), reg_w(src.AsWRegister()));
    } else if (dst.IsSRegister()) {
      CHECK(src.IsSRegister()) << src;
      ___ Fmov(reg_s(dst.AsSRegister()), reg_s(src.AsSRegister()));
    } else {
      CHECK(dst.IsDRegister()) << dst;
      CHECK(src.IsDRegister()) << src;
      ___ Fmov(reg_d(dst.AsDRegister()), reg_d(src.AsDRegister()));
    }
  }
}

void Arm64Assembler::CopyRawPtrFromThread64(FrameOffset fr_offs,
                                            ThreadOffset<8> tr_offs,
                                            ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
}

void Arm64Assembler::CopyRawPtrToThread64(ThreadOffset<8> tr_offs,
                                          FrameOffset fr_offs,
                                          ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
  StoreToOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
}

void Arm64Assembler::CopyRef(FrameOffset dest, FrameOffset src,
                             ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(),
                  SP, src.Int32Value());
  StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(),
                 SP, dest.Int32Value());
}

void Arm64Assembler::Copy(FrameOffset dest, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = src_base.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsCoreRegister(),
                    src_offset.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), SP, dest.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(ManagedRegister m_dest_base, Offset dest_offs, FrameOffset src,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister base = m_dest_base.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    LoadWFromOffset(kLoadWord, scratch.AsWRegister(), SP, src.Int32Value());
    StoreWToOffset(kStoreWord, scratch.AsWRegister(), base.AsCoreRegister(),
                   dest_offs.Int32Value());
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), SP, src.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), dest_offs.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, FrameOffset /*src_base*/, Offset /*src_offset*/,
                          ManagedRegister /*mscratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

void Arm64Assembler::Copy(ManagedRegister m_dest, Offset dest_offset,
                          ManagedRegister m_src, Offset src_offset,
                          ManagedRegister m_scratch, size_t size) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64ManagedRegister src = m_src.AsArm64();
  Arm64ManagedRegister dest = m_dest.AsArm64();
  CHECK(dest.IsCoreRegister()) << dest;
  CHECK(src.IsCoreRegister()) << src;
  CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
  CHECK(size == 4 || size == 8) << size;
  if (size == 4) {
    if (scratch.IsWRegister()) {
      LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsCoreRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsCoreRegister(),
                     dest_offset.Int32Value());
    } else {
      LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), src.AsCoreRegister(),
                      src_offset.Int32Value());
      StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), dest.AsCoreRegister(),
                     dest_offset.Int32Value());
    }
  } else if (size == 8) {
    LoadFromOffset(scratch.AsCoreRegister(), src.AsCoreRegister(), src_offset.Int32Value());
    StoreToOffset(scratch.AsCoreRegister(), dest.AsCoreRegister(), dest_offset.Int32Value());
  } else {
    UNIMPLEMENTED(FATAL) << "We only support Copy() of size 4 and 8";
  }
}

void Arm64Assembler::Copy(FrameOffset /*dst*/, Offset /*dest_offset*/,
                          FrameOffset /*src*/, Offset /*src_offset*/,
                          ManagedRegister /*scratch*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Copy() variant";
}

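// Emit a data memory barrier (inner shareable, all access types) on SMP
// builds; uniprocessor builds elide it entirely.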
void Arm64Assembler::MemoryBarrier(ManagedRegister m_scratch) {
  // TODO: Should we check that m_scratch is IP? - see arm.
#if ANDROID_SMP != 0
  ___ Dmb(vixl::InnerShareable, vixl::BarrierAll);
#endif
}

void Arm64Assembler::SignExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ sxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ sxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  Arm64ManagedRegister reg = mreg.AsArm64();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsWRegister()) << reg;
  if (size == 1) {
    ___ uxtb(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  } else {
    ___ uxth(reg_w(reg.AsWRegister()), reg_w(reg.AsWRegister()));
  }
}

void Arm64Assembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void Arm64Assembler::Call(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  LoadFromOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsCoreRegister()));
}

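// Tail-call variant of Call(): the target is loaded the same way, but control
// transfers with BR instead of BLR, so no return address is written to LR.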
void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(scratch.IsCoreRegister()) << scratch;
  // Remove base and scratch from the temp list - the higher level API uses IP1, IP0.
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(base.AsCoreRegister()), reg_x(scratch.AsCoreRegister()));
  ___ Ldr(reg_x(scratch.AsCoreRegister()), MEM_OP(reg_x(base.AsCoreRegister()), offs.Int32Value()));
  ___ Br(reg_x(scratch.AsCoreRegister()));
}

void Arm64Assembler::Call(FrameOffset base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  // Call *(*(SP + base) + offset).
  LoadFromOffset(scratch.AsCoreRegister(), SP, base.Int32Value());
  LoadFromOffset(scratch.AsCoreRegister(), scratch.AsCoreRegister(), offs.Int32Value());
  ___ Blr(reg_x(scratch.AsCoreRegister()));
}

void Arm64Assembler::CallFromThread64(ThreadOffset<8> /*offset*/, ManagedRegister /*scratch*/) {
  UNIMPLEMENTED(FATAL) << "Unimplemented Call() variant";
}

void Arm64Assembler::CreateHandleScopeEntry(ManagedRegister m_out_reg, FrameOffset handle_scope_offs,
                                            ManagedRegister m_in_reg, bool null_allowed) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  // For now we only hold stale handle scope entries in x registers.
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP + handle_offset)
    if (in_reg.IsNoRegister()) {
      LoadWFromOffset(kLoadWord, out_reg.AsOverlappingCoreRegisterLow(), SP,
                      handle_scope_offs.Int32Value());
      in_reg = out_reg;
    }
    ___ Cmp(reg_w(in_reg.AsOverlappingCoreRegisterLow()), 0);
    if (!out_reg.Equals(in_reg)) {
      LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
    }
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offs.Int32Value(), NE);
  } else {
    AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offs.Int32Value(), AL);
  }
}

void Arm64Assembler::CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handle_scope_offset,
                                            ManagedRegister m_scratch, bool null_allowed) {
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(scratch.IsCoreRegister()) << scratch;
  if (null_allowed) {
    LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP,
                    handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP + handle_scope_offset)
    ___ Cmp(reg_w(scratch.AsOverlappingCoreRegisterLow()), 0);
    // TODO: Move this logic into an AddConstant() variant that handles the flags.
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
  } else {
    AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
  }
  StoreToOffset(scratch.AsCoreRegister(), SP, out_off.Int32Value());
}


void Arm64Assembler::LoadReferenceFromHandleScope(ManagedRegister m_out_reg,
                                                  ManagedRegister m_in_reg) {
  Arm64ManagedRegister out_reg = m_out_reg.AsArm64();
  Arm64ManagedRegister in_reg = m_in_reg.AsArm64();
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  CHECK(in_reg.IsCoreRegister()) << in_reg;
  vixl::Label exit;
  if (!out_reg.Equals(in_reg)) {
    // FIXME: Who sets the flags here?
    LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
  }
  ___ Cmp(reg_x(in_reg.AsCoreRegister()), 0);
  ___ B(&exit, COND_OP(EQ));
  LoadFromOffset(out_reg.AsCoreRegister(), in_reg.AsCoreRegister(), 0);
  ___ Bind(&exit);
}

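// Poll for a pending exception: load Thread::exception_ through ETR and, if
// it is non-null, branch to a slow path that EmitSlowPaths() emits later.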
void Arm64Assembler::ExceptionPoll(ManagedRegister m_scratch, size_t stack_adjust) {
  CHECK_ALIGNED(stack_adjust, kStackAlignment);
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  Arm64Exception* current_exception = new Arm64Exception(scratch, stack_adjust);
  exception_blocks_.push_back(current_exception);
  LoadFromOffset(scratch.AsCoreRegister(), ETR, Thread::ExceptionOffset<8>().Int32Value());
  ___ Cmp(reg_x(scratch.AsCoreRegister()), 0);
  ___ B(current_exception->Entry(), COND_OP(NE));
}

void Arm64Assembler::EmitExceptionPoll(Arm64Exception* exception) {
  vixl::UseScratchRegisterScope temps(vixl_masm_);
  temps.Exclude(reg_x(exception->scratch_.AsCoreRegister()));
  vixl::Register temp = temps.AcquireX();

  // Bind exception poll entry.
  ___ Bind(exception->Entry());
  if (exception->stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSize(exception->stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving X0 as this won't return.
  ___ Mov(reg_x(X0), reg_x(exception->scratch_.AsCoreRegister()));
  ___ Ldr(temp, MEM_OP(reg_x(ETR), QUICK_ENTRYPOINT_OFFSET(8, pDeliverException).Int32Value()));

  // Move ETR (callee-saved) back to TR (caller-saved). We use ETR on calls
  // to external functions that might trash TR. We do not need the original
  // X19 saved in BuildFrame().
  ___ Mov(reg_x(TR), reg_x(ETR));

  ___ Blr(temp);
  // Call should never return.
  ___ Brk();
}

constexpr size_t kFramePointerSize = 8;

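// Frame layout built here, growing down: callee-saved registers are pushed
// first, the frame is then extended by 'adjust' bytes, the Method* is stored
// at SP + 0, and incoming argument spills are written just above the frame.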
void Arm64Assembler::BuildFrame(size_t frame_size, ManagedRegister method_reg,
                                const std::vector<ManagedRegister>& callee_save_regs,
                                const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK(X0 == method_reg.AsArm64().AsCoreRegister());

  // TODO: *create APCS FP - end of FP chain;
  //       *add support for saving a different set of callee regs.
  // For now we check that the size of the callee regs vector is 20,
  // equivalent to the APCS callee saved regs [X19, X30] [D8, D15].
  CHECK_EQ(callee_save_regs.size(), kCalleeSavedRegsSize);
  ___ PushCalleeSavedRegisters();

  // Move TR (caller-saved) to ETR (callee-saved). The original X19 has been
  // saved by PushCalleeSavedRegisters(). This way we make sure that TR is not
  // trashed by native code.
  ___ Mov(reg_x(ETR), reg_x(TR));

  // Increase frame to required size - must be at least space to push Method*.
  CHECK_GT(frame_size, kCalleeSavedRegsSize * kFramePointerSize);
  size_t adjust = frame_size - (kCalleeSavedRegsSize * kFramePointerSize);
  IncreaseFrameSize(adjust);

  // Write Method*.
  StoreToOffset(X0, SP, 0);

  // Write out entry spills.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    Arm64ManagedRegister reg = entry_spills.at(i).AsArm64();
    if (reg.IsNoRegister()) {
      // Only increment the stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      StoreToOffset(reg.AsCoreRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsWRegister()) {
      StoreWToOffset(kStoreWord, reg.AsWRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    } else if (reg.IsSRegister()) {
      StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    }
  }
}

void Arm64Assembler::RemoveFrame(size_t frame_size,
                                 const std::vector<ManagedRegister>& callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);

  // For now we only check that the size of the frame is greater than the
  // number of APCS callee saved regs [X19, X30] [D8, D15].
  CHECK_EQ(callee_save_regs.size(), kCalleeSavedRegsSize);
  CHECK_GT(frame_size, kCalleeSavedRegsSize * kFramePointerSize);

  // Decrease frame size to start of callee saved regs.
  size_t adjust = frame_size - (kCalleeSavedRegsSize * kFramePointerSize);
  DecreaseFrameSize(adjust);

  // We move ETR (callee-saved) back to TR (caller-saved), which might have
  // been trashed in the native call. The original X19 (ETR) is restored as
  // part of PopCalleeSavedRegisters().
  ___ Mov(reg_x(TR), reg_x(ETR));

  // Pop callee saved and return to LR.
  ___ PopCalleeSavedRegisters();
  ___ Ret();
}

}  // namespace arm64
}  // namespace art