Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2014 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "assembler_arm64.h" |
| 18 | #include "base/logging.h" |
| 19 | #include "entrypoints/quick/quick_entrypoints.h" |
| 20 | #include "offsets.h" |
| 21 | #include "thread.h" |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 22 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 23 | using namespace vixl::aarch64; // NOLINT(build/namespaces) |
Alexandre Rames | ba9388c | 2014-08-22 14:08:36 +0100 | [diff] [blame] | 24 | |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 25 | namespace art { |
| 26 | namespace arm64 { |
| 27 | |
| 28 | #ifdef ___ |
| 29 | #error "ARM64 Assembler macro already defined." |
| 30 | #else |
Alexandre Rames | 087930f | 2016-08-02 13:45:28 +0100 | [diff] [blame] | 31 | #define ___ vixl_masm_. |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 32 | #endif |
| 33 | |
Vladimir Marko | cf93a5c | 2015-06-16 11:33:24 +0000 | [diff] [blame] | 34 | void Arm64Assembler::FinalizeCode() { |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 35 | ___ FinalizeCode(); |
| 36 | } |
| 37 | |
| 38 | size_t Arm64Assembler::CodeSize() const { |
Artem Serov | af4e42a | 2016-08-08 15:11:24 +0100 | [diff] [blame] | 39 | return vixl_masm_.GetSizeOfCodeGenerated(); |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 40 | } |
| 41 | |
// Start address of the VIXL buffer holding the generated instructions.
// NOTE(review): presumably only meaningful once code emission is finished
// (see FinalizeCode()) -- confirm with callers.
const uint8_t* Arm64Assembler::CodeBufferBaseAddress() const {
  return vixl_masm_.GetBuffer().GetStartAddress<const uint8_t*>();
}
| 45 | |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 46 | void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) { |
| 47 | // Copy the instructions from the buffer. |
Scott Wakeling | b77051e | 2016-11-21 19:46:00 +0000 | [diff] [blame] | 48 | MemoryRegion from(vixl_masm_.GetBuffer()->GetStartAddress<void*>(), CodeSize()); |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 49 | region.CopyFrom(0, from); |
| 50 | } |
| 51 | |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 52 | void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) { |
| 53 | Arm64ManagedRegister dst = m_dst.AsArm64(); |
| 54 | Arm64ManagedRegister base = m_base.AsArm64(); |
Alexandre Rames | 37c92df | 2014-10-17 14:35:27 +0100 | [diff] [blame] | 55 | CHECK(dst.IsXRegister() && base.IsXRegister()); |
Serban Constantinescu | 0f89dac | 2014-05-08 13:52:53 +0100 | [diff] [blame] | 56 | // Remove dst and base form the temp list - higher level API uses IP1, IP0. |
Alexandre Rames | 087930f | 2016-08-02 13:45:28 +0100 | [diff] [blame] | 57 | UseScratchRegisterScope temps(&vixl_masm_); |
Alexandre Rames | 37c92df | 2014-10-17 14:35:27 +0100 | [diff] [blame] | 58 | temps.Exclude(reg_x(dst.AsXRegister()), reg_x(base.AsXRegister())); |
| 59 | ___ Ldr(reg_x(dst.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value())); |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 60 | } |
| 61 | |
Andreas Gampe | c6ee54e | 2014-03-24 16:45:44 -0700 | [diff] [blame] | 62 | void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) { |
| 63 | Arm64ManagedRegister base = m_base.AsArm64(); |
| 64 | Arm64ManagedRegister scratch = m_scratch.AsArm64(); |
Alexandre Rames | 37c92df | 2014-10-17 14:35:27 +0100 | [diff] [blame] | 65 | CHECK(base.IsXRegister()) << base; |
| 66 | CHECK(scratch.IsXRegister()) << scratch; |
Serban Constantinescu | 0f89dac | 2014-05-08 13:52:53 +0100 | [diff] [blame] | 67 | // Remove base and scratch form the temp list - higher level API uses IP1, IP0. |
Alexandre Rames | 087930f | 2016-08-02 13:45:28 +0100 | [diff] [blame] | 68 | UseScratchRegisterScope temps(&vixl_masm_); |
Alexandre Rames | 37c92df | 2014-10-17 14:35:27 +0100 | [diff] [blame] | 69 | temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister())); |
| 70 | ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value())); |
| 71 | ___ Br(reg_x(scratch.AsXRegister())); |
Andreas Gampe | c6ee54e | 2014-03-24 16:45:44 -0700 | [diff] [blame] | 72 | } |
| 73 | |
Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 74 | static inline dwarf::Reg DWARFReg(CPURegister reg) { |
| 75 | if (reg.IsFPRegister()) { |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 76 | return dwarf::Reg::Arm64Fp(reg.GetCode()); |
Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 77 | } else { |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 78 | DCHECK_LT(reg.GetCode(), 31u); // X0 - X30. |
| 79 | return dwarf::Reg::Arm64Core(reg.GetCode()); |
Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 80 | } |
David Srbecky | dd97393 | 2015-04-07 20:29:48 +0100 | [diff] [blame] | 81 | } |
| 82 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 83 | void Arm64Assembler::SpillRegisters(CPURegList registers, int offset) { |
| 84 | int size = registers.GetRegisterSizeInBytes(); |
Alexandre Rames | 087930f | 2016-08-02 13:45:28 +0100 | [diff] [blame] | 85 | const Register sp = vixl_masm_.StackPointer(); |
Anton Kirilov | bde6ae1 | 2016-06-10 17:46:12 +0100 | [diff] [blame] | 86 | // Since we are operating on register pairs, we would like to align on |
| 87 | // double the standard size; on the other hand, we don't want to insert |
| 88 | // an extra store, which will happen if the number of registers is even. |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 89 | if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) { |
Anton Kirilov | bde6ae1 | 2016-06-10 17:46:12 +0100 | [diff] [blame] | 90 | const CPURegister& dst0 = registers.PopLowestIndex(); |
| 91 | ___ Str(dst0, MemOperand(sp, offset)); |
| 92 | cfi_.RelOffset(DWARFReg(dst0), offset); |
| 93 | offset += size; |
| 94 | } |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 95 | while (registers.GetCount() >= 2) { |
Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 96 | const CPURegister& dst0 = registers.PopLowestIndex(); |
| 97 | const CPURegister& dst1 = registers.PopLowestIndex(); |
| 98 | ___ Stp(dst0, dst1, MemOperand(sp, offset)); |
| 99 | cfi_.RelOffset(DWARFReg(dst0), offset); |
| 100 | cfi_.RelOffset(DWARFReg(dst1), offset + size); |
| 101 | offset += 2 * size; |
| 102 | } |
| 103 | if (!registers.IsEmpty()) { |
| 104 | const CPURegister& dst0 = registers.PopLowestIndex(); |
| 105 | ___ Str(dst0, MemOperand(sp, offset)); |
| 106 | cfi_.RelOffset(DWARFReg(dst0), offset); |
| 107 | } |
| 108 | DCHECK(registers.IsEmpty()); |
David Srbecky | dd97393 | 2015-04-07 20:29:48 +0100 | [diff] [blame] | 109 | } |
| 110 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 111 | void Arm64Assembler::UnspillRegisters(CPURegList registers, int offset) { |
| 112 | int size = registers.GetRegisterSizeInBytes(); |
Alexandre Rames | 087930f | 2016-08-02 13:45:28 +0100 | [diff] [blame] | 113 | const Register sp = vixl_masm_.StackPointer(); |
Anton Kirilov | bde6ae1 | 2016-06-10 17:46:12 +0100 | [diff] [blame] | 114 | // Be consistent with the logic for spilling registers. |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 115 | if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) { |
Anton Kirilov | bde6ae1 | 2016-06-10 17:46:12 +0100 | [diff] [blame] | 116 | const CPURegister& dst0 = registers.PopLowestIndex(); |
| 117 | ___ Ldr(dst0, MemOperand(sp, offset)); |
| 118 | cfi_.Restore(DWARFReg(dst0)); |
| 119 | offset += size; |
| 120 | } |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 121 | while (registers.GetCount() >= 2) { |
Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 122 | const CPURegister& dst0 = registers.PopLowestIndex(); |
| 123 | const CPURegister& dst1 = registers.PopLowestIndex(); |
| 124 | ___ Ldp(dst0, dst1, MemOperand(sp, offset)); |
| 125 | cfi_.Restore(DWARFReg(dst0)); |
| 126 | cfi_.Restore(DWARFReg(dst1)); |
| 127 | offset += 2 * size; |
| 128 | } |
| 129 | if (!registers.IsEmpty()) { |
| 130 | const CPURegister& dst0 = registers.PopLowestIndex(); |
| 131 | ___ Ldr(dst0, MemOperand(sp, offset)); |
| 132 | cfi_.Restore(DWARFReg(dst0)); |
| 133 | } |
| 134 | DCHECK(registers.IsEmpty()); |
| 135 | } |
Ian Rogers | 790a6b7 | 2014-04-01 10:36:00 -0700 | [diff] [blame] | 136 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 137 | void Arm64Assembler::PoisonHeapReference(Register reg) { |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 138 | DCHECK(reg.IsW()); |
| 139 | // reg = -reg. |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 140 | ___ Neg(reg, Operand(reg)); |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 141 | } |
| 142 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 143 | void Arm64Assembler::UnpoisonHeapReference(Register reg) { |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 144 | DCHECK(reg.IsW()); |
| 145 | // reg = -reg. |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 146 | ___ Neg(reg, Operand(reg)); |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 147 | } |
| 148 | |
Roland Levillain | 0b671c0 | 2016-08-19 12:02:34 +0100 | [diff] [blame] | 149 | void Arm64Assembler::MaybePoisonHeapReference(Register reg) { |
| 150 | if (kPoisonHeapReferences) { |
| 151 | PoisonHeapReference(reg); |
| 152 | } |
| 153 | } |
| 154 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 155 | void Arm64Assembler::MaybeUnpoisonHeapReference(Register reg) { |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 156 | if (kPoisonHeapReferences) { |
| 157 | UnpoisonHeapReference(reg); |
| 158 | } |
| 159 | } |
| 160 | |
| 161 | #undef ___ |
| 162 | |
Serban Constantinescu | ed8dd49 | 2014-02-11 14:15:10 +0000 | [diff] [blame] | 163 | } // namespace arm64 |
| 164 | } // namespace art |