/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <memory>
#include <vector>

#include "base/logging.h"
#include "constants_arm64.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/assembler.h"
#include "offsets.h"
#include "utils.h"

// TODO: make vixl clean wrt -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "vixl/a64/macro-assembler-a64.h"
#include "vixl/a64/disasm-a64.h"
#pragma GCC diagnostic pop

namespace art {
namespace arm64 {

#define MEM_OP(...)      vixl::MemOperand(__VA_ARGS__)
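// MEM_OP simply forwards its arguments to the vixl::MemOperand constructor,
// e.g. MEM_OP(reg_x(base), offset) to form a base-plus-offset address
// (example shown here for illustration only).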

enum LoadOperandType {
  kLoadSignedByte,
  kLoadUnsignedByte,
  kLoadSignedHalfword,
  kLoadUnsignedHalfword,
  kLoadWord,
  kLoadCoreWord,
  kLoadSWord,
  kLoadDWord
};

enum StoreOperandType {
  kStoreByte,
  kStoreHalfword,
  kStoreWord,
  kStoreCoreWord,
  kStoreSWord,
  kStoreDWord
};
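
// Note (summarizing the Load*/Store* helpers below): the Word variants operate
// on 32-bit W registers, the CoreWord variants on 64-bit X registers, and the
// SWord/DWord variants on single- and double-precision FP registers.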

class Arm64Exception;

class Arm64Assembler FINAL : public Assembler {
 public:
  // We indicate the size of the initial code generation buffer to the VIXL
  // assembler. From there it will automatically manage the buffer.
  Arm64Assembler() : vixl_masm_(new vixl::MacroAssembler(kArm64BaseBufferSize)) {}

  virtual ~Arm64Assembler() {
    delete vixl_masm_;
  }

  // Emit slow paths queued during assembly.
  void EmitSlowPaths();

  // Size of generated code.
  size_t CodeSize() const;

  // Copy instructions out of assembly buffer into the given region of memory.
  void FinalizeInstructions(const MemoryRegion& region);

  // Emit code that will create an activation on the stack.
  void BuildFrame(size_t frame_size, ManagedRegister method_reg,
                  const std::vector<ManagedRegister>& callee_save_regs,
                  const ManagedRegisterEntrySpills& entry_spills) OVERRIDE;

  // Emit code that will remove an activation from the stack.
  void RemoveFrame(size_t frame_size, const std::vector<ManagedRegister>& callee_save_regs)
      OVERRIDE;

  void IncreaseFrameSize(size_t adjust) OVERRIDE;
  void DecreaseFrameSize(size_t adjust) OVERRIDE;

  // Store routines.
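  // Note: the *Thread64 variants take ThreadOffset<8> because ARM64 is a
  // 64-bit target, i.e. Thread offsets are computed for 8-byte pointers.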
  void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
  void StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm, ManagedRegister scratch)
      OVERRIDE;
  void StoreStackOffsetToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs,
                                  ManagedRegister scratch) OVERRIDE;
  void StoreStackPointerToThread64(ThreadOffset<8> thr_offs) OVERRIDE;
  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
                     ManagedRegister scratch) OVERRIDE;

  // Load routines.
  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs) OVERRIDE;
  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
  void LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) OVERRIDE;

  // Copying routines.
  void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  void CopyRawPtrFromThread64(FrameOffset fr_offs, ThreadOffset<8> thr_offs,
                              ManagedRegister scratch) OVERRIDE;
  void CopyRawPtrToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
      OVERRIDE;
  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest, Offset dest_offset, ManagedRegister src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void MemoryBarrier(ManagedRegister scratch) OVERRIDE;

  // Sign extension.
  void SignExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Zero extension.
  void ZeroExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Exploit fast access in managed code to Thread::Current().
  void GetCurrentThread(ManagedRegister tr) OVERRIDE;
  void GetCurrentThread(FrameOffset dest_offset, ManagedRegister scratch) OVERRIDE;

  // Set up out_reg to hold an Object** into the handle scope, or to be NULL if the
  // value is null and null_allowed. in_reg holds a possibly stale reference
  // that can be used to avoid loading the handle scope entry to see if the value is
  // NULL.
  void CreateHandleScopeEntry(ManagedRegister out_reg, FrameOffset handlescope_offset,
                              ManagedRegister in_reg, bool null_allowed) OVERRIDE;

  // Set up out_off to hold an Object** into the handle scope, or to be NULL if the
  // value is null and null_allowed.
  void CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handlescope_offset,
                              ManagedRegister scratch, bool null_allowed) OVERRIDE;

  // src holds a handle scope entry (Object**); load this into dst.
  void LoadReferenceFromHandleScope(ManagedRegister dst, ManagedRegister src) OVERRIDE;

  // Heap::VerifyObject on src. In some cases (such as a reference to this) we
  // know that src may not be null.
  void VerifyObject(ManagedRegister src, bool could_be_null) OVERRIDE;
  void VerifyObject(FrameOffset src, bool could_be_null) OVERRIDE;

  // Call to address held at [base+offset].
  void Call(ManagedRegister base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void Call(FrameOffset base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void CallFromThread64(ThreadOffset<8> offset, ManagedRegister scratch) OVERRIDE;

  // Jump to address (not setting link register).
  void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to an ExceptionSlowPath if it is.
  void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;

 private:
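  // Helpers mapping ART's XRegister/WRegister enumerators to VIXL registers.
  // ART models SP/WSP and XZR/WZR as distinct enumerators, while in the A64
  // encoding they share register code 31, so they are translated explicitly
  // rather than through XRegFromCode()/WRegFromCode().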
  static vixl::Register reg_x(int code) {
    CHECK(code < kNumberOfXRegisters) << code;
    if (code == SP) {
      return vixl::sp;
    } else if (code == XZR) {
      return vixl::xzr;
    }
    return vixl::Register::XRegFromCode(code);
  }

  static vixl::Register reg_w(int code) {
    CHECK(code < kNumberOfWRegisters) << code;
    if (code == WSP) {
      return vixl::wsp;
    } else if (code == WZR) {
      return vixl::wzr;
    }
    return vixl::Register::WRegFromCode(code);
  }

  static vixl::FPRegister reg_d(int code) {
    return vixl::FPRegister::DRegFromCode(code);
  }

  static vixl::FPRegister reg_s(int code) {
    return vixl::FPRegister::SRegFromCode(code);
  }

  // Emits Exception block.
  void EmitExceptionPoll(Arm64Exception *exception);

  void StoreWToOffset(StoreOperandType type, WRegister source,
                      XRegister base, int32_t offset);
  void StoreToOffset(XRegister source, XRegister base, int32_t offset);
  void StoreSToOffset(SRegister source, XRegister base, int32_t offset);
  void StoreDToOffset(DRegister source, XRegister base, int32_t offset);

  void LoadImmediate(XRegister dest, int32_t value, vixl::Condition cond = vixl::al);
  void Load(Arm64ManagedRegister dst, XRegister src, int32_t src_offset, size_t size);
  void LoadWFromOffset(LoadOperandType type, WRegister dest,
                       XRegister base, int32_t offset);
  void LoadFromOffset(XRegister dest, XRegister base, int32_t offset);
  void LoadSFromOffset(SRegister dest, XRegister base, int32_t offset);
  void LoadDFromOffset(DRegister dest, XRegister base, int32_t offset);
  void AddConstant(XRegister rd, int32_t value, vixl::Condition cond = vixl::al);
  void AddConstant(XRegister rd, XRegister rn, int32_t value, vixl::Condition cond = vixl::al);

  // List of exception blocks to generate at the end of the code cache.
  std::vector<Arm64Exception*> exception_blocks_;

 public:
  // Vixl assembler.
  vixl::MacroAssembler* const vixl_masm_;

  // Used for testing.
  friend class Arm64ManagedRegister_VixlRegisters_Test;
};
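
// Illustrative driver sketch (not part of the original header): this is roughly
// how a JNI stub generator is expected to use the assembler. Names such as
// frame_size, method_reg, callee_save_regs and entry_spills are assumed to come
// from the calling convention and are placeholders here.
//
//   Arm64Assembler assembler;
//   assembler.BuildFrame(frame_size, method_reg, callee_save_regs, entry_spills);
//   // ... emit loads, stores, Call() and ExceptionPoll() for the stub body ...
//   assembler.RemoveFrame(frame_size, callee_save_regs);
//   assembler.EmitSlowPaths();  // Flush queued exception blocks.
//   std::vector<uint8_t> code(assembler.CodeSize());
//   MemoryRegion region(code.data(), code.size());
//   assembler.FinalizeInstructions(region);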

class Arm64Exception {
 private:
  explicit Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)
      : scratch_(scratch), stack_adjust_(stack_adjust) {
  }

  vixl::Label* Entry() { return &exception_entry_; }

  // Register used for passing Thread::Current()->exception_.
  const Arm64ManagedRegister scratch_;

  // Stack adjust for ExceptionPoll.
  const size_t stack_adjust_;

  vixl::Label exception_entry_;

  friend class Arm64Assembler;
  DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_