/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <memory>
#include <vector>

#include "base/arena_containers.h"
#include "base/logging.h"
#include "constants_arm64.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/assembler.h"
#include "offsets.h"

// TODO: make vixl clean wrt -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#pragma GCC diagnostic ignored "-Wshadow"
#pragma GCC diagnostic ignored "-Wmissing-noreturn"
#include "vixl/a64/macro-assembler-a64.h"
#include "vixl/a64/disasm-a64.h"
#pragma GCC diagnostic pop

namespace art {
namespace arm64 {

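// Shorthand for building a vixl::MemOperand: MEM_OP(reg_x(base), offset), for
// example, simply expands to vixl::MemOperand(reg_x(base), offset) (`base` and
// `offset` being illustrative names).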
#define MEM_OP(...)      vixl::MemOperand(__VA_ARGS__)

enum LoadOperandType {
  kLoadSignedByte,
  kLoadUnsignedByte,
  kLoadSignedHalfword,
  kLoadUnsignedHalfword,
  kLoadWord,
  kLoadCoreWord,
  kLoadSWord,
  kLoadDWord
};

enum StoreOperandType {
  kStoreByte,
  kStoreHalfword,
  kStoreWord,
  kStoreCoreWord,
  kStoreSWord,
  kStoreDWord
};

class Arm64Exception;

class Arm64Assembler FINAL : public Assembler {
 public:
  // We indicate the size of the initial code generation buffer to the VIXL
  // assembler. From there it will automatically manage the buffer.
  explicit Arm64Assembler(ArenaAllocator* arena)
      : Assembler(arena),
        exception_blocks_(arena->Adapter(kArenaAllocAssembler)),
        vixl_masm_(new vixl::MacroAssembler(kArm64BaseBufferSize)) {}

  virtual ~Arm64Assembler() {
    delete vixl_masm_;
  }
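  // A minimal construction sketch (the arena set-up below is illustrative and
  // lives outside this class):
  //
  //   ArenaPool pool;
  //   ArenaAllocator arena(&pool);
  //   Arm64Assembler assembler(&arena);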

  // Finalize the code.
  void FinalizeCode() OVERRIDE;

  // Size of generated code.
  size_t CodeSize() const OVERRIDE;
  const uint8_t* CodeBufferBaseAddress() const OVERRIDE;

  // Copy instructions out of the assembly buffer into the given region of memory.
  void FinalizeInstructions(const MemoryRegion& region);
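  // A typical use of the methods above (a sketch; the caller-side buffer
  // handling is illustrative):
  //
  //   assembler.FinalizeCode();
  //   std::vector<uint8_t> code(assembler.CodeSize());
  //   MemoryRegion region(code.data(), code.size());
  //   assembler.FinalizeInstructions(region);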

  void SpillRegisters(vixl::CPURegList registers, int offset);
  void UnspillRegisters(vixl::CPURegList registers, int offset);
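  // A sketch of spilling through a VIXL register list (the register mask and
  // offset below are illustrative):
  //
  //   vixl::CPURegList core_regs(vixl::CPURegister::kRegister, vixl::kXRegSize,
  //                              core_spill_mask);
  //   SpillRegisters(core_regs, frame_size - core_regs.TotalSizeInBytes());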

  // Emit code that will create an activation on the stack.
  void BuildFrame(size_t frame_size, ManagedRegister method_reg,
                  const std::vector<ManagedRegister>& callee_save_regs,
                  const ManagedRegisterEntrySpills& entry_spills) OVERRIDE;

  // Emit code that will remove an activation from the stack.
  void RemoveFrame(size_t frame_size, const std::vector<ManagedRegister>& callee_save_regs)
      OVERRIDE;

  void IncreaseFrameSize(size_t adjust) OVERRIDE;
  void DecreaseFrameSize(size_t adjust) OVERRIDE;

  // Store routines.
  void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
  void StoreImmediateToThread64(ThreadOffset<8> dest, uint32_t imm, ManagedRegister scratch)
      OVERRIDE;
  void StoreStackOffsetToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs,
                                  ManagedRegister scratch) OVERRIDE;
  void StoreStackPointerToThread64(ThreadOffset<8> thr_offs) OVERRIDE;
  void StoreSpanning(FrameOffset dest, ManagedRegister src, FrameOffset in_off,
                     ManagedRegister scratch) OVERRIDE;
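  // A sketch of the frame store interface (the offset, register and thread
  // offset below are illustrative values, not taken from real stubs):
  //
  //   Store(FrameOffset(32), Arm64ManagedRegister::FromXRegister(X0), 8);
  //   StoreStackPointerToThread64(ThreadOffset<8>(top_of_stack_offset));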

  // Load routines.
  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  void LoadFromThread64(ManagedRegister dest, ThreadOffset<8> src, size_t size) OVERRIDE;
  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  void LoadRef(ManagedRegister dest, ManagedRegister base, MemberOffset offs,
               bool unpoison_reference) OVERRIDE;
  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
  void LoadRawPtrFromThread64(ManagedRegister dest, ThreadOffset<8> offs) OVERRIDE;

  // Copying routines.
  void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  void CopyRawPtrFromThread64(FrameOffset fr_offs, ThreadOffset<8> thr_offs,
                              ManagedRegister scratch) OVERRIDE;
  void CopyRawPtrToThread64(ThreadOffset<8> thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
      OVERRIDE;
  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest, Offset dest_offset, ManagedRegister src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest, Offset dest_offset, FrameOffset src, Offset src_offset,
            ManagedRegister scratch, size_t size) OVERRIDE;
  void MemoryBarrier(ManagedRegister scratch) OVERRIDE;

  // Sign extension.
  void SignExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Zero extension.
  void ZeroExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Exploit fast access in managed code to Thread::Current().
  void GetCurrentThread(ManagedRegister tr) OVERRIDE;
  void GetCurrentThread(FrameOffset dest_offset, ManagedRegister scratch) OVERRIDE;

  // Set up out_reg to hold an Object** into the handle scope, or to be null if the
  // value is null and null_allowed. in_reg holds a possibly stale reference
  // that can be used to avoid loading the handle scope entry to see if the value is
  // null.
  void CreateHandleScopeEntry(ManagedRegister out_reg, FrameOffset handlescope_offset,
                              ManagedRegister in_reg, bool null_allowed) OVERRIDE;

  // Set up out_off to hold an Object** into the handle scope, or to be null if the
  // value is null and null_allowed.
  void CreateHandleScopeEntry(FrameOffset out_off, FrameOffset handlescope_offset,
                              ManagedRegister scratch, bool null_allowed) OVERRIDE;

  // src holds a handle scope entry (Object**); load it into dst.
  void LoadReferenceFromHandleScope(ManagedRegister dst, ManagedRegister src) OVERRIDE;

  // Heap::VerifyObject on src. In some cases (such as a reference to this) we
  // know that src cannot be null.
  void VerifyObject(ManagedRegister src, bool could_be_null) OVERRIDE;
  void VerifyObject(FrameOffset src, bool could_be_null) OVERRIDE;

  // Call to address held at [base+offset].
  void Call(ManagedRegister base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void Call(FrameOffset base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void CallFromThread64(ThreadOffset<8> offset, ManagedRegister scratch) OVERRIDE;

  // Jump to address (does not set the link register).
  void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to an ExceptionSlowPath if it is.
  void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
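  // The slow path is not emitted inline: each poll records an Arm64Exception
  // in exception_blocks_, and the corresponding blocks are generated at the
  // end of the code (see EmitExceptionPoll() below).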

  //
  // Heap poisoning.
  //

  // Poison a heap reference contained in `reg`.
  void PoisonHeapReference(vixl::Register reg);
  // Unpoison a heap reference contained in `reg`.
  void UnpoisonHeapReference(vixl::Register reg);
  // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
  void MaybeUnpoisonHeapReference(vixl::Register reg);
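  // A reference loaded through the raw helpers rather than through
  // LoadRef(..., /* unpoison_reference */ true) would typically be restored
  // before use; a minimal sketch with illustrative registers:
  //
  //   LoadWFromOffset(kLoadWord, W0, X0, offset);
  //   MaybeUnpoisonHeapReference(reg_w(W0));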

  void Bind(Label* label ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Do not use Bind for ARM64";
  }
  void Jump(Label* label ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Do not use Jump for ARM64";
  }
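  // Label binding and jumps for ARM64 are expected to go through vixl::Label
  // on vixl_masm_ rather than the generic Assembler Label, hence the fatal
  // stubs above.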

 private:
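  // Helpers mapping ART register codes to VIXL registers. SP/WSP and XZR/WZR
  // have distinct pseudo-codes on the ART side, so they are translated
  // explicitly before falling back to vixl::Register::XRegFromCode() /
  // WRegFromCode().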
  static vixl::Register reg_x(int code) {
    CHECK(code < kNumberOfXRegisters) << code;
    if (code == SP) {
      return vixl::sp;
    } else if (code == XZR) {
      return vixl::xzr;
    }
    return vixl::Register::XRegFromCode(code);
  }

  static vixl::Register reg_w(int code) {
    CHECK(code < kNumberOfWRegisters) << code;
    if (code == WSP) {
      return vixl::wsp;
    } else if (code == WZR) {
      return vixl::wzr;
    }
    return vixl::Register::WRegFromCode(code);
  }

  static vixl::FPRegister reg_d(int code) {
    return vixl::FPRegister::DRegFromCode(code);
  }

  static vixl::FPRegister reg_s(int code) {
    return vixl::FPRegister::SRegFromCode(code);
  }

  // Emits the exception block.
  void EmitExceptionPoll(Arm64Exception* exception);

  void StoreWToOffset(StoreOperandType type, WRegister source,
                      XRegister base, int32_t offset);
  void StoreToOffset(XRegister source, XRegister base, int32_t offset);
  void StoreSToOffset(SRegister source, XRegister base, int32_t offset);
  void StoreDToOffset(DRegister source, XRegister base, int32_t offset);

  void LoadImmediate(XRegister dest, int32_t value, vixl::Condition cond = vixl::al);
  void Load(Arm64ManagedRegister dst, XRegister src, int32_t src_offset, size_t size);
  void LoadWFromOffset(LoadOperandType type, WRegister dest,
                       XRegister base, int32_t offset);
  void LoadFromOffset(XRegister dest, XRegister base, int32_t offset);
  void LoadSFromOffset(SRegister dest, XRegister base, int32_t offset);
  void LoadDFromOffset(DRegister dest, XRegister base, int32_t offset);
  void AddConstant(XRegister rd, int32_t value, vixl::Condition cond = vixl::al);
  void AddConstant(XRegister rd, XRegister rn, int32_t value, vixl::Condition cond = vixl::al);
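  // The LoadOperandType/StoreOperandType arguments select the access width and
  // register class. A minimal sketch of these helpers (registers and offsets
  // are illustrative):
  //
  //   StoreToOffset(X0, SP, 16);                          // str x0, [sp, #16]
  //   LoadFromOffset(X1, SP, 16);                         // ldr x1, [sp, #16]
  //   LoadWFromOffset(kLoadUnsignedHalfword, W2, X1, 8);  // ldrh w2, [x1, #8]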

  // List of exception blocks to generate at the end of the code cache.
  ArenaVector<Arm64Exception*> exception_blocks_;

 public:
  // Vixl assembler.
  vixl::MacroAssembler* const vixl_masm_;

  // Used for testing.
  friend class Arm64ManagedRegister_VixlRegisters_Test;
};

class Arm64Exception {
 private:
  Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)
      : scratch_(scratch), stack_adjust_(stack_adjust) {
  }

  vixl::Label* Entry() { return &exception_entry_; }

  // Register used for passing Thread::Current()->exception_.
  const Arm64ManagedRegister scratch_;

  // Stack adjustment for ExceptionPoll.
  const size_t stack_adjust_;

  vixl::Label exception_entry_;

  friend class Arm64Assembler;
  DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_