/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <vector>

#include "base/logging.h"
#include "constants_arm64.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/assembler.h"
#include "offsets.h"
#include "utils.h"
#include "UniquePtr.h"
#include "a64/macro-assembler-a64.h"
#include "a64/disasm-a64.h"

namespace art {
namespace arm64 {

#define MEM_OP(x...) vixl::MemOperand(x)
#define COND_OP(x) static_cast<vixl::Condition>(x)
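// Usage sketch (illustrative): MEM_OP(reg_x(base), offset) forwards its
// arguments to the vixl::MemOperand constructor, and COND_OP(cond) converts
// an art::arm64::Condition into the equivalent vixl::Condition.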

enum Condition {
  kNoCondition = -1,
  EQ = 0,
  NE = 1,
  HS = 2,
  LO = 3,
  MI = 4,
  PL = 5,
  VS = 6,
  VC = 7,
  HI = 8,
  LS = 9,
  GE = 10,
  LT = 11,
  GT = 12,
  LE = 13,
  AL = 14,  // Always.
  NV = 15,  // Behaves as always/al.
  kMaxCondition = 16,
};
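// Note: EQ through NV follow the ARMv8 condition-code encoding (0b0000 to
// 0b1111), which is what makes the static_cast in COND_OP above well defined.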

enum LoadOperandType {
  kLoadSignedByte,
  kLoadUnsignedByte,
  kLoadSignedHalfword,
  kLoadUnsignedHalfword,
  kLoadWord,
  kLoadCoreWord,
  kLoadSWord,
  kLoadDWord
};
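// Each load type selects an access width and extension, e.g. kLoadSignedByte
// corresponds to ldrsb, kLoadWord to a 32-bit ldr, kLoadCoreWord to a 64-bit
// ldr, and kLoadSWord/kLoadDWord to the S- and D-register FP loads.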

enum StoreOperandType {
  kStoreByte,
  kStoreHalfword,
  kStoreWord,
  kStoreCoreWord,
  kStoreSWord,
  kStoreDWord
};
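// Store types mirror the load types: kStoreByte maps to strb, kStoreHalfword
// to strh, kStoreWord to a 32-bit str, kStoreCoreWord to a 64-bit str, and
// kStoreSWord/kStoreDWord to the FP stores.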

class Arm64Exception;

class Arm64Assembler : public Assembler {
 public:
  Arm64Assembler() : vixl_buf_(new byte[BUF_SIZE]),
                     vixl_masm_(new vixl::MacroAssembler(vixl_buf_, BUF_SIZE)) {}

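  // In debug builds, print a disassembly of everything emitted into the
  // buffer as a sanity check before releasing it.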
  virtual ~Arm64Assembler() {
    if (kIsDebugBuild) {
      vixl::Decoder decoder;
      vixl::PrintDisassembler disasm(stdout);
      decoder.AppendVisitor(&disasm);

      for (size_t i = 0; i < CodeSize() / vixl::kInstructionSize; ++i) {
        vixl::Instruction* instr =
            reinterpret_cast<vixl::Instruction*>(vixl_buf_ + i * vixl::kInstructionSize);
        decoder.Decode(instr);
      }
    }
    delete[] vixl_buf_;
  }

  // Emit slow paths queued during assembly.
  void EmitSlowPaths();

  // Size of generated code.
  size_t CodeSize() const;

  // Copy instructions out of assembly buffer into the given region of memory.
  void FinalizeInstructions(const MemoryRegion& region);

  // Emit code that will create an activation on the stack.
  void BuildFrame(size_t frame_size, ManagedRegister method_reg,
                  const std::vector<ManagedRegister>& callee_save_regs,
                  const std::vector<ManagedRegister>& entry_spills);

  // Emit code that will remove an activation from the stack.
  void RemoveFrame(size_t frame_size,
                   const std::vector<ManagedRegister>& callee_save_regs);

  void IncreaseFrameSize(size_t adjust);
  void DecreaseFrameSize(size_t adjust);

  // Store routines.
  void Store(FrameOffset offs, ManagedRegister src, size_t size);
  void StoreRef(FrameOffset dest, ManagedRegister src);
  void StoreRawPtr(FrameOffset dest, ManagedRegister src);
  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                             ManagedRegister scratch);
  void StoreImmediateToThread(ThreadOffset dest, uint32_t imm,
                              ManagedRegister scratch);
  void StoreStackOffsetToThread(ThreadOffset thr_offs,
                                FrameOffset fr_offs,
                                ManagedRegister scratch);
  void StoreStackPointerToThread(ThreadOffset thr_offs);
  void StoreSpanning(FrameOffset dest, ManagedRegister src,
                     FrameOffset in_off, ManagedRegister scratch);

  // Load routines.
  void Load(ManagedRegister dest, FrameOffset src, size_t size);
  void Load(ManagedRegister dest, ThreadOffset src, size_t size);
  void LoadRef(ManagedRegister dest, FrameOffset src);
  void LoadRef(ManagedRegister dest, ManagedRegister base,
               MemberOffset offs);
  void LoadRawPtr(ManagedRegister dest, ManagedRegister base,
                  Offset offs);
  void LoadRawPtrFromThread(ManagedRegister dest,
                            ThreadOffset offs);

  // Copying routines.
  void Move(ManagedRegister dest, ManagedRegister src, size_t size);
  void CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset thr_offs,
                            ManagedRegister scratch);
  void CopyRawPtrToThread(ThreadOffset thr_offs, FrameOffset fr_offs,
                          ManagedRegister scratch);
  void CopyRef(FrameOffset dest, FrameOffset src,
               ManagedRegister scratch);
  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size);
  void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset,
            ManagedRegister scratch, size_t size);
  void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src,
            ManagedRegister scratch, size_t size);
  void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset,
            ManagedRegister scratch, size_t size);
  void Copy(ManagedRegister dest, Offset dest_offset,
            ManagedRegister src, Offset src_offset,
            ManagedRegister scratch, size_t size);
  void Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
            ManagedRegister scratch, size_t size);
  void MemoryBarrier(ManagedRegister scratch);

  // Sign extension.
  void SignExtend(ManagedRegister mreg, size_t size);

  // Zero extension.
  void ZeroExtend(ManagedRegister mreg, size_t size);

  // Exploit fast access in managed code to Thread::Current().
  void GetCurrentThread(ManagedRegister tr);
  void GetCurrentThread(FrameOffset dest_offset,
                        ManagedRegister scratch);

  // Set up out_reg to hold an Object** into the SIRT, or to be NULL if the
  // value is null and null_allowed. in_reg holds a possibly stale reference
  // that can be used to avoid loading the SIRT entry to see if the value is
  // NULL.
  void CreateSirtEntry(ManagedRegister out_reg, FrameOffset sirt_offset,
                       ManagedRegister in_reg, bool null_allowed);

  // Set up out_off to hold an Object** into the SIRT, or to be NULL if the
  // value is null and null_allowed.
  void CreateSirtEntry(FrameOffset out_off, FrameOffset sirt_offset,
                       ManagedRegister scratch, bool null_allowed);

  // src holds a SIRT entry (Object**); load this into dst.
  void LoadReferenceFromSirt(ManagedRegister dst,
                             ManagedRegister src);

  // Heap::VerifyObject on src. In some cases (such as a reference to this) we
  // know that src cannot be null.
  void VerifyObject(ManagedRegister src, bool could_be_null);
  void VerifyObject(FrameOffset src, bool could_be_null);

  // Call to address held at [base+offset].
  void Call(ManagedRegister base, Offset offset, ManagedRegister scratch);
  void Call(FrameOffset base, Offset offset, ManagedRegister scratch);
  void Call(ThreadOffset offset, ManagedRegister scratch);

  // Jump to address (not setting link register).
  void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to an ExceptionSlowPath if it is.
  void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust);

 private:
  static vixl::Register reg_x(int code) {
    CHECK(code < kNumberOfCoreRegisters) << code;
    if (code == SP) {
      return vixl::sp;
    }
    return vixl::Register::XRegFromCode(code);
  }
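  // Note: SP is mapped explicitly because, in the A64 encoding, the stack
  // pointer and the zero register share a register number and vixl
  // distinguishes the two, so XRegFromCode() alone would not yield sp.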

  static vixl::Register reg_w(int code) {
    return vixl::Register::WRegFromCode(code);
  }

  static vixl::FPRegister reg_d(int code) {
    return vixl::FPRegister::DRegFromCode(code);
  }

  static vixl::FPRegister reg_s(int code) {
    return vixl::FPRegister::SRegFromCode(code);
  }

  // Emits the exception block.
  void EmitExceptionPoll(Arm64Exception* exception);

  void StoreWToOffset(StoreOperandType type, WRegister source,
                      Register base, int32_t offset);
  void StoreToOffset(Register source, Register base, int32_t offset);
  void StoreSToOffset(SRegister source, Register base, int32_t offset);
  void StoreDToOffset(DRegister source, Register base, int32_t offset);

  void LoadImmediate(Register dest, int32_t value, Condition cond = AL);
  void Load(Arm64ManagedRegister dst, Register src, int32_t src_offset, size_t size);
  void LoadWFromOffset(LoadOperandType type, WRegister dest,
                       Register base, int32_t offset);
  void LoadFromOffset(Register dest, Register base, int32_t offset);
  void LoadSFromOffset(SRegister dest, Register base, int32_t offset);
  void LoadDFromOffset(DRegister dest, Register base, int32_t offset);
  void AddConstant(Register rd, int32_t value, Condition cond = AL);
  void AddConstant(Register rd, Register rn, int32_t value, Condition cond = AL);

  // Vixl buffer size.
  static constexpr size_t BUF_SIZE = 4096;

  // Vixl buffer.
  byte* vixl_buf_;

  // Unique pointer to the vixl macro-assembler.
  UniquePtr<vixl::MacroAssembler> vixl_masm_;

  // List of exception blocks to generate at the end of the code cache.
  std::vector<Arm64Exception*> exception_blocks_;
};

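// Typical usage, as an illustrative sketch (register and buffer choices are
// hypothetical, not part of the API):
//
//   Arm64Assembler assembler;
//   assembler.BuildFrame(frame_size, method_reg, callee_save_regs, entry_spills);
//   ...  // Emit the method body.
//   assembler.RemoveFrame(frame_size, callee_save_regs);
//   assembler.EmitSlowPaths();
//   MemoryRegion code(code_buffer, assembler.CodeSize());
//   assembler.FinalizeInstructions(code);
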
class Arm64Exception {
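  // One instance is queued per ExceptionPoll() call site; EmitSlowPaths()
  // later emits the out-of-line branch target at exception_entry_.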
 private:
  explicit Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)
      : scratch_(scratch), stack_adjust_(stack_adjust) {
  }

  vixl::Label* Entry() { return &exception_entry_; }

  // Register used for passing Thread::Current()->exception_.
  const Arm64ManagedRegister scratch_;

  // Stack adjustment for ExceptionPoll.
  const size_t stack_adjust_;

  vixl::Label exception_entry_;

  friend class Arm64Assembler;
  DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_