blob: 97fb93af826791024da60e0f8e2e169f45a30796 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
18#define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
19
20#include <vector>
Stuart Monteithb95a5342014-03-12 13:32:32 +000021#include <stdint.h>
Serban Constantinescued8dd492014-02-11 14:15:10 +000022
23#include "base/logging.h"
24#include "constants_arm64.h"
25#include "utils/arm64/managed_register_arm64.h"
26#include "utils/assembler.h"
27#include "offsets.h"
28#include "utils.h"
29#include "UniquePtr.h"
30#include "a64/macro-assembler-a64.h"
31#include "a64/disasm-a64.h"
32
33namespace art {
34namespace arm64 {
35
36#define MEM_OP(x...) vixl::MemOperand(x)
37#define COND_OP(x) static_cast<vixl::Condition>(x)
38
// AArch64 condition codes. The numeric values match the architectural
// 4-bit "cond" field encoding, which is why COND_OP (above) can cast a
// Condition directly to vixl::Condition.
enum Condition {
  kNoCondition = -1,  // Sentinel: no condition attached.
  EQ = 0,   // Equal (Z set).
  NE = 1,   // Not equal (Z clear).
  HS = 2,   // Unsigned higher or same (C set); alias of CS.
  LO = 3,   // Unsigned lower (C clear); alias of CC.
  MI = 4,   // Minus / negative (N set).
  PL = 5,   // Plus / positive or zero (N clear).
  VS = 6,   // Overflow (V set).
  VC = 7,   // No overflow (V clear).
  HI = 8,   // Unsigned higher (C set and Z clear).
  LS = 9,   // Unsigned lower or same (C clear or Z set).
  GE = 10,  // Signed greater than or equal (N == V).
  LT = 11,  // Signed less than (N != V).
  GT = 12,  // Signed greater than (Z clear and N == V).
  LE = 13,  // Signed less than or equal (Z set or N != V).
  AL = 14,  // Always.
  NV = 15,  // Behaves as always/al.
  kMaxCondition = 16,  // Number of encodable conditions.
};
59
// Operand width/extension selector for the private Load*Offset helpers
// below (see LoadWFromOffset/LoadFromOffset and friends).
enum LoadOperandType {
  kLoadSignedByte,        // 8-bit load, sign-extended.
  kLoadUnsignedByte,      // 8-bit load, zero-extended.
  kLoadSignedHalfword,    // 16-bit load, sign-extended.
  kLoadUnsignedHalfword,  // 16-bit load, zero-extended.
  kLoadWord,              // 32-bit load (W register).
  kLoadCoreWord,          // 64-bit load (X register).
  kLoadSWord,             // 32-bit FP load (S register).
  kLoadDWord              // 64-bit FP load (D register).
};
70
// Operand width selector for the private Store*Offset helpers below
// (see StoreWToOffset/StoreToOffset and friends).
enum StoreOperandType {
  kStoreByte,      // 8-bit store.
  kStoreHalfword,  // 16-bit store.
  kStoreWord,      // 32-bit store (W register).
  kStoreCoreWord,  // 64-bit store (X register).
  kStoreSWord,     // 32-bit FP store (S register).
  kStoreDWord      // 64-bit FP store (D register).
};
79
class Arm64Exception;

// ARM64 (AArch64) backend of the ART Assembler interface. Code is
// generated through a wrapped VIXL MacroAssembler writing into an
// internally owned buffer (vixl_buf_), then copied out via
// FinalizeInstructions().
class Arm64Assembler FINAL : public Assembler {
 public:
  // Allocates the code buffer and hands it to a fresh VIXL macro
  // assembler. vixl_buf_ is owned by this object and released in the
  // destructor; vixl_masm_ is owned via UniquePtr.
  Arm64Assembler() : vixl_buf_(new byte[kBufferSizeArm64]),
                     vixl_masm_(new vixl::MacroAssembler(vixl_buf_, kBufferSizeArm64)) {}

  virtual ~Arm64Assembler() {
    delete[] vixl_buf_;
  }

  // Emit slow paths queued during assembly.
  // NOTE(review): exception_blocks_ (below) holds raw pointers; presumably
  // they are emitted and freed here — confirm against the .cc file.
  void EmitSlowPaths();

  // Size of generated code.
  size_t CodeSize() const;

  // Copy instructions out of assembly buffer into the given region of memory.
  void FinalizeInstructions(const MemoryRegion& region);

  // Emit code that will create an activation on the stack.
  void BuildFrame(size_t frame_size, ManagedRegister method_reg,
                  const std::vector<ManagedRegister>& callee_save_regs,
                  const ManagedRegisterEntrySpills& entry_spills) OVERRIDE;

  // Emit code that will remove an activation from the stack.
  void RemoveFrame(size_t frame_size, const std::vector<ManagedRegister>& callee_save_regs)
      OVERRIDE;

  // Grow/shrink the current stack frame by |adjust| bytes.
  void IncreaseFrameSize(size_t adjust) OVERRIDE;
  void DecreaseFrameSize(size_t adjust) OVERRIDE;

  // Store routines: register/immediate -> stack frame or Thread (64-bit
  // thread-register variants take ThreadOffset<8>).
  void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
  void StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm, ManagedRegister scratch)
      OVERRIDE;
  void StoreStackOffsetToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs,
                                  ManagedRegister scratch) OVERRIDE;
  void StoreStackPointerToThread64(ThreadOffset<8> thr_offs) OVERRIDE;
  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
                     ManagedRegister scratch) OVERRIDE;

  // Load routines: stack frame / Thread / object field -> register.
  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs) OVERRIDE;
  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
  void LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) OVERRIDE;

  // Copying routines: memory-to-memory moves through a scratch register.
  void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  void CopyRawPtrFromThread64(FrameOffset fr_offs, ThreadOffset<8> thr_offs,
                              ManagedRegister scratch) OVERRIDE;
  void CopyRawPtrToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
      OVERRIDE;
  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest, Offset dest_offset, ManagedRegister src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void MemoryBarrier(ManagedRegister scratch) OVERRIDE;

  // Sign extension.
  void SignExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Zero extension.
  void ZeroExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Exploit fast access in managed code to Thread::Current().
  void GetCurrentThread(ManagedRegister tr) OVERRIDE;
  void GetCurrentThread(FrameOffset dest_offset, ManagedRegister scratch) OVERRIDE;

  // Set up out_reg to hold a Object** into the SIRT, or to be NULL if the
  // value is null and null_allowed. in_reg holds a possibly stale reference
  // that can be used to avoid loading the SIRT entry to see if the value is
  // NULL.
  void CreateSirtEntry(ManagedRegister out_reg, FrameOffset sirt_offset,
                       ManagedRegister in_reg, bool null_allowed) OVERRIDE;

  // Set up out_off to hold a Object** into the SIRT, or to be NULL if the
  // value is null and null_allowed.
  void CreateSirtEntry(FrameOffset out_off, FrameOffset sirt_offset,
                       ManagedRegister scratch, bool null_allowed) OVERRIDE;

  // src holds a SIRT entry (Object**) load this into dst.
  void LoadReferenceFromSirt(ManagedRegister dst, ManagedRegister src) OVERRIDE;

  // Heap::VerifyObject on src. In some cases (such as a reference to this) we
  // know that src may not be null.
  void VerifyObject(ManagedRegister src, bool could_be_null) OVERRIDE;
  void VerifyObject(FrameOffset src, bool could_be_null) OVERRIDE;

  // Call to address held at [base+offset].
  void Call(ManagedRegister base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void Call(FrameOffset base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void CallFromThread64(ThreadOffset<8> offset, ManagedRegister scratch) OVERRIDE;

  // Jump to address (not setting link register)
  void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to a ExceptionSlowPath if it is.
  void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;

 private:
  // Map an ART core-register code to the VIXL X (64-bit) register,
  // special-casing SP and XZR which VIXL models as distinct objects.
  static vixl::Register reg_x(int code) {
    CHECK(code < kNumberOfCoreRegisters) << code;
    if (code == SP) {
      return vixl::sp;
    } else if (code == XZR) {
      return vixl::xzr;
    }
    return vixl::Register::XRegFromCode(code);
  }

  // Map a register code to the VIXL W (32-bit) register.
  static vixl::Register reg_w(int code) {
    return vixl::Register::WRegFromCode(code);
  }

  // Map a register code to the VIXL D (64-bit FP) register.
  static vixl::FPRegister reg_d(int code) {
    return vixl::FPRegister::DRegFromCode(code);
  }

  // Map a register code to the VIXL S (32-bit FP) register.
  static vixl::FPRegister reg_s(int code) {
    return vixl::FPRegister::SRegFromCode(code);
  }

  // Emits Exception block.
  void EmitExceptionPoll(Arm64Exception *exception);

  // Width-dispatching store helpers; |type| selects the store form.
  void StoreWToOffset(StoreOperandType type, WRegister source,
                      Register base, int32_t offset);
  void StoreToOffset(Register source, Register base, int32_t offset);
  void StoreSToOffset(SRegister source, Register base, int32_t offset);
  void StoreDToOffset(DRegister source, Register base, int32_t offset);

  // Width-dispatching load/arithmetic helpers; |cond| defaults to AL
  // (unconditional).
  void LoadImmediate(Register dest, int32_t value, Condition cond = AL);
  void Load(Arm64ManagedRegister dst, Register src, int32_t src_offset, size_t size);
  void LoadWFromOffset(LoadOperandType type, WRegister dest,
                       Register base, int32_t offset);
  void LoadFromOffset(Register dest, Register base, int32_t offset);
  void LoadSFromOffset(SRegister dest, Register base, int32_t offset);
  void LoadDFromOffset(DRegister dest, Register base, int32_t offset);
  void AddConstant(Register rd, int32_t value, Condition cond = AL);
  void AddConstant(Register rd, Register rn, int32_t value, Condition cond = AL);

  // Vixl buffer. Owned raw allocation; freed by the destructor.
  byte* vixl_buf_;

  // Unique ptr - vixl assembler.
  UniquePtr<vixl::MacroAssembler> vixl_masm_;

  // List of exception blocks to generate at the end of the code cache.
  // NOTE(review): raw owning pointers — lifetime appears to be managed by
  // ExceptionPoll/EmitSlowPaths in the .cc; confirm there is no leak when
  // slow paths are never emitted.
  std::vector<Arm64Exception*> exception_blocks_;

  // Used for testing.
  friend class Arm64ManagedRegister_VixlRegisters_Test;
};
249
// Descriptor for one pending exception slow path, created by
// Arm64Assembler::ExceptionPoll() and emitted by EmitExceptionPoll().
// Everything is private: only Arm64Assembler (a friend) may construct
// and use instances.
class Arm64Exception {
 private:
  explicit Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)
      : scratch_(scratch), stack_adjust_(stack_adjust) {
  }

  // Label marking the entry point of the emitted slow-path block.
  vixl::Label* Entry() { return &exception_entry_; }

  // Register used for passing Thread::Current()->exception_ .
  const Arm64ManagedRegister scratch_;

  // Stack adjust for ExceptionPoll.
  const size_t stack_adjust_;

  vixl::Label exception_entry_;

  friend class Arm64Assembler;
  DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
};
269
270} // namespace arm64
271} // namespace art
272
273#endif // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_