Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2015 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #ifndef ART_COMPILER_OPTIMIZING_COMMON_ARM64_H_ |
| 18 | #define ART_COMPILER_OPTIMIZING_COMMON_ARM64_H_ |
| 19 | |
Alexandre Rames | 8626b74 | 2015-11-25 16:28:08 +0000 | [diff] [blame] | 20 | #include "code_generator.h" |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 21 | #include "locations.h" |
| 22 | #include "nodes.h" |
| 23 | #include "utils/arm64/assembler_arm64.h" |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 24 | |
Artem Serov | af4e42a | 2016-08-08 15:11:24 +0100 | [diff] [blame^] | 25 | // TODO(VIXL): Make VIXL compile with -Wshadow. |
| 26 | #pragma GCC diagnostic push |
| 27 | #pragma GCC diagnostic ignored "-Wshadow" |
| 28 | #include "aarch64/disasm-aarch64.h" |
| 29 | #include "aarch64/macro-assembler-aarch64.h" |
| 30 | #include "aarch64/simulator-aarch64.h" |
| 31 | #pragma GCC diagnostic pop |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 32 | |
| 33 | namespace art { |
| 34 | namespace arm64 { |
| 35 | namespace helpers { |
| 36 | |
// Convenience helpers to ease conversion to and from VIXL operands.
// ART encodes SP as 31 and XZR as 32 (checked below); VIXL uses distinct
// internal codes for them, hence the translation helpers that follow.
static_assert((SP == 31) && (WSP == 31) && (XZR == 32) && (WZR == 32),
              "Unexpected values for register codes.");
| 40 | |
| 41 | static inline int VIXLRegCodeFromART(int code) { |
| 42 | if (code == SP) { |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 43 | return vixl::aarch64::kSPRegInternalCode; |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 44 | } |
| 45 | if (code == XZR) { |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 46 | return vixl::aarch64::kZeroRegCode; |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 47 | } |
| 48 | return code; |
| 49 | } |
| 50 | |
| 51 | static inline int ARTRegCodeFromVIXL(int code) { |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 52 | if (code == vixl::aarch64::kSPRegInternalCode) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 53 | return SP; |
| 54 | } |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 55 | if (code == vixl::aarch64::kZeroRegCode) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 56 | return XZR; |
| 57 | } |
| 58 | return code; |
| 59 | } |
| 60 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 61 | static inline vixl::aarch64::Register XRegisterFrom(Location location) { |
Roland Levillain | 3a448e4 | 2016-04-01 18:37:46 +0100 | [diff] [blame] | 62 | DCHECK(location.IsRegister()) << location; |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 63 | return vixl::aarch64::Register::GetXRegFromCode(VIXLRegCodeFromART(location.reg())); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 64 | } |
| 65 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 66 | static inline vixl::aarch64::Register WRegisterFrom(Location location) { |
Roland Levillain | 3a448e4 | 2016-04-01 18:37:46 +0100 | [diff] [blame] | 67 | DCHECK(location.IsRegister()) << location; |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 68 | return vixl::aarch64::Register::GetWRegFromCode(VIXLRegCodeFromART(location.reg())); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 69 | } |
| 70 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 71 | static inline vixl::aarch64::Register RegisterFrom(Location location, Primitive::Type type) { |
Roland Levillain | 3a448e4 | 2016-04-01 18:37:46 +0100 | [diff] [blame] | 72 | DCHECK(type != Primitive::kPrimVoid && !Primitive::IsFloatingPointType(type)) << type; |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 73 | return type == Primitive::kPrimLong ? XRegisterFrom(location) : WRegisterFrom(location); |
| 74 | } |
| 75 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 76 | static inline vixl::aarch64::Register OutputRegister(HInstruction* instr) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 77 | return RegisterFrom(instr->GetLocations()->Out(), instr->GetType()); |
| 78 | } |
| 79 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 80 | static inline vixl::aarch64::Register InputRegisterAt(HInstruction* instr, int input_index) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 81 | return RegisterFrom(instr->GetLocations()->InAt(input_index), |
| 82 | instr->InputAt(input_index)->GetType()); |
| 83 | } |
| 84 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 85 | static inline vixl::aarch64::FPRegister DRegisterFrom(Location location) { |
Roland Levillain | 3a448e4 | 2016-04-01 18:37:46 +0100 | [diff] [blame] | 86 | DCHECK(location.IsFpuRegister()) << location; |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 87 | return vixl::aarch64::FPRegister::GetDRegFromCode(location.reg()); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 88 | } |
| 89 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 90 | static inline vixl::aarch64::FPRegister SRegisterFrom(Location location) { |
Roland Levillain | 3a448e4 | 2016-04-01 18:37:46 +0100 | [diff] [blame] | 91 | DCHECK(location.IsFpuRegister()) << location; |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 92 | return vixl::aarch64::FPRegister::GetSRegFromCode(location.reg()); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 93 | } |
| 94 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 95 | static inline vixl::aarch64::FPRegister FPRegisterFrom(Location location, Primitive::Type type) { |
Roland Levillain | 3a448e4 | 2016-04-01 18:37:46 +0100 | [diff] [blame] | 96 | DCHECK(Primitive::IsFloatingPointType(type)) << type; |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 97 | return type == Primitive::kPrimDouble ? DRegisterFrom(location) : SRegisterFrom(location); |
| 98 | } |
| 99 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 100 | static inline vixl::aarch64::FPRegister OutputFPRegister(HInstruction* instr) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 101 | return FPRegisterFrom(instr->GetLocations()->Out(), instr->GetType()); |
| 102 | } |
| 103 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 104 | static inline vixl::aarch64::FPRegister InputFPRegisterAt(HInstruction* instr, int input_index) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 105 | return FPRegisterFrom(instr->GetLocations()->InAt(input_index), |
| 106 | instr->InputAt(input_index)->GetType()); |
| 107 | } |
| 108 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 109 | static inline vixl::aarch64::CPURegister CPURegisterFrom(Location location, Primitive::Type type) { |
| 110 | return Primitive::IsFloatingPointType(type) |
| 111 | ? vixl::aarch64::CPURegister(FPRegisterFrom(location, type)) |
| 112 | : vixl::aarch64::CPURegister(RegisterFrom(location, type)); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 113 | } |
| 114 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 115 | static inline vixl::aarch64::CPURegister OutputCPURegister(HInstruction* instr) { |
Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 116 | return Primitive::IsFloatingPointType(instr->GetType()) |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 117 | ? static_cast<vixl::aarch64::CPURegister>(OutputFPRegister(instr)) |
| 118 | : static_cast<vixl::aarch64::CPURegister>(OutputRegister(instr)); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 119 | } |
| 120 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 121 | static inline vixl::aarch64::CPURegister InputCPURegisterAt(HInstruction* instr, int index) { |
Alexandre Rames | 542361f | 2015-01-29 16:57:31 +0000 | [diff] [blame] | 122 | return Primitive::IsFloatingPointType(instr->InputAt(index)->GetType()) |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 123 | ? static_cast<vixl::aarch64::CPURegister>(InputFPRegisterAt(instr, index)) |
| 124 | : static_cast<vixl::aarch64::CPURegister>(InputRegisterAt(instr, index)); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 125 | } |
| 126 | |
| 127 | static inline int64_t Int64ConstantFrom(Location location) { |
| 128 | HConstant* instr = location.GetConstant(); |
Nicolas Geoffray | de0eb6f | 2015-03-04 10:28:04 +0000 | [diff] [blame] | 129 | if (instr->IsIntConstant()) { |
| 130 | return instr->AsIntConstant()->GetValue(); |
| 131 | } else if (instr->IsNullConstant()) { |
| 132 | return 0; |
| 133 | } else { |
Roland Levillain | 3a448e4 | 2016-04-01 18:37:46 +0100 | [diff] [blame] | 134 | DCHECK(instr->IsLongConstant()) << instr->DebugName(); |
Nicolas Geoffray | de0eb6f | 2015-03-04 10:28:04 +0000 | [diff] [blame] | 135 | return instr->AsLongConstant()->GetValue(); |
| 136 | } |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 137 | } |
| 138 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 139 | static inline vixl::aarch64::Operand OperandFrom(Location location, Primitive::Type type) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 140 | if (location.IsRegister()) { |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 141 | return vixl::aarch64::Operand(RegisterFrom(location, type)); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 142 | } else { |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 143 | return vixl::aarch64::Operand(Int64ConstantFrom(location)); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 144 | } |
| 145 | } |
| 146 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 147 | static inline vixl::aarch64::Operand InputOperandAt(HInstruction* instr, int input_index) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 148 | return OperandFrom(instr->GetLocations()->InAt(input_index), |
| 149 | instr->InputAt(input_index)->GetType()); |
| 150 | } |
| 151 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 152 | static inline vixl::aarch64::MemOperand StackOperandFrom(Location location) { |
| 153 | return vixl::aarch64::MemOperand(vixl::aarch64::sp, location.GetStackIndex()); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 154 | } |
| 155 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 156 | static inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base, |
| 157 | size_t offset = 0) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 158 | // A heap reference must be 32bit, so fit in a W register. |
| 159 | DCHECK(base.IsW()); |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 160 | return vixl::aarch64::MemOperand(base.X(), offset); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 161 | } |
| 162 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 163 | static inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base, |
| 164 | const vixl::aarch64::Register& regoffset, |
| 165 | vixl::aarch64::Shift shift = vixl::aarch64::LSL, |
| 166 | unsigned shift_amount = 0) { |
Alexandre Rames | 82000b0 | 2015-07-07 11:34:16 +0100 | [diff] [blame] | 167 | // A heap reference must be 32bit, so fit in a W register. |
| 168 | DCHECK(base.IsW()); |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 169 | return vixl::aarch64::MemOperand(base.X(), regoffset, shift, shift_amount); |
Alexandre Rames | 82000b0 | 2015-07-07 11:34:16 +0100 | [diff] [blame] | 170 | } |
| 171 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 172 | static inline vixl::aarch64::MemOperand HeapOperand(const vixl::aarch64::Register& base, |
| 173 | Offset offset) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 174 | return HeapOperand(base, offset.SizeValue()); |
| 175 | } |
| 176 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 177 | static inline vixl::aarch64::MemOperand HeapOperandFrom(Location location, Offset offset) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 178 | return HeapOperand(RegisterFrom(location, Primitive::kPrimNot), offset); |
| 179 | } |
| 180 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 181 | static inline Location LocationFrom(const vixl::aarch64::Register& reg) { |
| 182 | return Location::RegisterLocation(ARTRegCodeFromVIXL(reg.GetCode())); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 183 | } |
| 184 | |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 185 | static inline Location LocationFrom(const vixl::aarch64::FPRegister& fpreg) { |
| 186 | return Location::FpuRegisterLocation(fpreg.GetCode()); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 187 | } |
| 188 | |
// Rebuild an Operand equivalent to the offset component of `mem_op`: the
// immediate offset for an immediate-offset MemOperand, or the offset register
// with its original extend or shift for a register-offset MemOperand. Any
// other addressing form aborts.
static inline vixl::aarch64::Operand OperandFromMemOperand(
    const vixl::aarch64::MemOperand& mem_op) {
  if (mem_op.IsImmediateOffset()) {
    return vixl::aarch64::Operand(mem_op.GetOffset());
  } else {
    DCHECK(mem_op.IsRegisterOffset());
    // Check the extend before the shift: an extended register offset is
    // reconstructed with its extend mode, a shifted one with its shift mode.
    if (mem_op.GetExtend() != vixl::aarch64::NO_EXTEND) {
      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
                                    mem_op.GetExtend(),
                                    mem_op.GetShiftAmount());
    } else if (mem_op.GetShift() != vixl::aarch64::NO_SHIFT) {
      return vixl::aarch64::Operand(mem_op.GetRegisterOffset(),
                                    mem_op.GetShift(),
                                    mem_op.GetShiftAmount());
    } else {
      // A register offset is expected to carry either an extend or a shift.
      LOG(FATAL) << "Should not reach here";
      UNREACHABLE();
    }
  }
}
| 209 | |
Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 210 | static bool CanEncodeConstantAsImmediate(HConstant* constant, HInstruction* instr) { |
Roland Levillain | 22c4922 | 2016-03-18 14:04:28 +0000 | [diff] [blame] | 211 | DCHECK(constant->IsIntConstant() || constant->IsLongConstant() || constant->IsNullConstant()) |
| 212 | << constant->DebugName(); |
Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 213 | |
| 214 | // For single uses we let VIXL handle the constant generation since it will |
| 215 | // use registers that are not managed by the register allocator (wip0, wip1). |
Vladimir Marko | 46817b8 | 2016-03-29 12:21:58 +0100 | [diff] [blame] | 216 | if (constant->GetUses().HasExactlyOneElement()) { |
Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 217 | return true; |
| 218 | } |
| 219 | |
Scott Wakeling | 40a04bf | 2015-12-11 09:50:36 +0000 | [diff] [blame] | 220 | // Our code generator ensures shift distances are within an encodable range. |
| 221 | if (instr->IsRor()) { |
| 222 | return true; |
| 223 | } |
| 224 | |
Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 225 | int64_t value = CodeGenerator::GetInt64ValueOf(constant); |
| 226 | |
Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 227 | if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) { |
| 228 | // Uses logical operations. |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 229 | return vixl::aarch64::Assembler::IsImmLogical(value, vixl::aarch64::kXRegSize); |
Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 230 | } else if (instr->IsNeg()) { |
| 231 | // Uses mov -immediate. |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 232 | return vixl::aarch64::Assembler::IsImmMovn(value, vixl::aarch64::kXRegSize); |
Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 233 | } else { |
| 234 | DCHECK(instr->IsAdd() || |
Artem Serov | 328429f | 2016-07-06 16:23:04 +0100 | [diff] [blame] | 235 | instr->IsIntermediateAddress() || |
Alexandre Rames | e6dbf48 | 2015-10-19 10:10:41 +0100 | [diff] [blame] | 236 | instr->IsBoundsCheck() || |
| 237 | instr->IsCompare() || |
| 238 | instr->IsCondition() || |
Roland Levillain | 22c4922 | 2016-03-18 14:04:28 +0000 | [diff] [blame] | 239 | instr->IsSub()) |
| 240 | << instr->DebugName(); |
Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 241 | // Uses aliases of ADD/SUB instructions. |
Alexandre Rames | b69fbfb | 2015-10-16 09:08:46 +0100 | [diff] [blame] | 242 | // If `value` does not fit but `-value` does, VIXL will automatically use |
| 243 | // the 'opposite' instruction. |
Scott Wakeling | 97c72b7 | 2016-06-24 16:19:36 +0100 | [diff] [blame] | 244 | return vixl::aarch64::Assembler::IsImmAddSub(value) |
| 245 | || vixl::aarch64::Assembler::IsImmAddSub(-value); |
Serban Constantinescu | 2d35d9d | 2015-02-22 22:08:01 +0000 | [diff] [blame] | 246 | } |
| 247 | } |
| 248 | |
| 249 | static inline Location ARM64EncodableConstantOrRegister(HInstruction* constant, |
| 250 | HInstruction* instr) { |
| 251 | if (constant->IsConstant() |
| 252 | && CanEncodeConstantAsImmediate(constant->AsConstant(), instr)) { |
| 253 | return Location::ConstantLocation(constant->AsConstant()); |
| 254 | } |
| 255 | |
| 256 | return Location::RequiresRegister(); |
| 257 | } |
| 258 | |
// Check if registers in art register set have the same register code in vixl. If the register
// codes are same, we can initialize vixl register list simply by the register masks. Currently,
// only SP/WSP and ZXR/WZR codes are different between art and vixl.
// Note: This function is only used for debug checks.
static inline bool ArtVixlRegCodeCoherentForRegSet(uint32_t art_core_registers,
                                                   size_t num_core,
                                                   uint32_t art_fpu_registers,
                                                   size_t num_fpu) {
  // The register masks won't work if the number of register is larger than 32.
  DCHECK_GE(sizeof(art_core_registers) * 8, num_core);
  DCHECK_GE(sizeof(art_fpu_registers) * 8, num_fpu);
  // Walk each allocated core register and compare its ART code with the VIXL
  // translation; any mismatch (i.e. SP or XZR present) breaks coherence.
  for (size_t art_reg_code = 0; art_reg_code < num_core; ++art_reg_code) {
    if (RegisterSet::Contains(art_core_registers, art_reg_code)) {
      if (art_reg_code != static_cast<size_t>(VIXLRegCodeFromART(art_reg_code))) {
        return false;
      }
    }
  }
  // There is no register code translation for float registers.
  return true;
}
| 280 | |
// Map an HArm64DataProcWithShifterOp shift kind onto the corresponding VIXL
// shift operator. Only ASR/LSL/LSR are valid here; other kinds abort.
static inline vixl::aarch64::Shift ShiftFromOpKind(HArm64DataProcWithShifterOp::OpKind op_kind) {
  switch (op_kind) {
    case HArm64DataProcWithShifterOp::kASR: return vixl::aarch64::ASR;
    case HArm64DataProcWithShifterOp::kLSL: return vixl::aarch64::LSL;
    case HArm64DataProcWithShifterOp::kLSR: return vixl::aarch64::LSR;
    default:
      LOG(FATAL) << "Unexpected op kind " << op_kind;
      UNREACHABLE();
      // Not reached; satisfies compilers that require a return value.
      return vixl::aarch64::NO_SHIFT;
  }
}
| 292 | |
// Map an HArm64DataProcWithShifterOp extension kind onto the corresponding
// VIXL extend operator. Only the UXT*/SXT* kinds are valid; others abort.
static inline vixl::aarch64::Extend ExtendFromOpKind(HArm64DataProcWithShifterOp::OpKind op_kind) {
  switch (op_kind) {
    case HArm64DataProcWithShifterOp::kUXTB: return vixl::aarch64::UXTB;
    case HArm64DataProcWithShifterOp::kUXTH: return vixl::aarch64::UXTH;
    case HArm64DataProcWithShifterOp::kUXTW: return vixl::aarch64::UXTW;
    case HArm64DataProcWithShifterOp::kSXTB: return vixl::aarch64::SXTB;
    case HArm64DataProcWithShifterOp::kSXTH: return vixl::aarch64::SXTH;
    case HArm64DataProcWithShifterOp::kSXTW: return vixl::aarch64::SXTW;
    default:
      LOG(FATAL) << "Unexpected op kind " << op_kind;
      UNREACHABLE();
      // Not reached; satisfies compilers that require a return value.
      return vixl::aarch64::NO_EXTEND;
  }
}
| 307 | |
| 308 | static inline bool CanFitInShifterOperand(HInstruction* instruction) { |
| 309 | if (instruction->IsTypeConversion()) { |
| 310 | HTypeConversion* conversion = instruction->AsTypeConversion(); |
| 311 | Primitive::Type result_type = conversion->GetResultType(); |
| 312 | Primitive::Type input_type = conversion->GetInputType(); |
| 313 | // We don't expect to see the same type as input and result. |
| 314 | return Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type) && |
| 315 | (result_type != input_type); |
| 316 | } else { |
| 317 | return (instruction->IsShl() && instruction->AsShl()->InputAt(1)->IsIntConstant()) || |
| 318 | (instruction->IsShr() && instruction->AsShr()->InputAt(1)->IsIntConstant()) || |
| 319 | (instruction->IsUShr() && instruction->AsUShr()->InputAt(1)->IsIntConstant()); |
| 320 | } |
| 321 | } |
| 322 | |
| 323 | static inline bool HasShifterOperand(HInstruction* instr) { |
| 324 | // `neg` instructions are an alias of `sub` using the zero register as the |
| 325 | // first register input. |
| 326 | bool res = instr->IsAdd() || instr->IsAnd() || instr->IsNeg() || |
| 327 | instr->IsOr() || instr->IsSub() || instr->IsXor(); |
| 328 | return res; |
| 329 | } |
| 330 | |
// Returns true if `instruction`'s shifter operand may also carry an extend
// (UXT*/SXT*) rather than only a shift.
static inline bool ShifterOperandSupportsExtension(HInstruction* instruction) {
  DCHECK(HasShifterOperand(instruction));
  // Although the `neg` instruction is an alias of the `sub` instruction, `HNeg`
  // does *not* support extension. This is because the `extended register` form
  // of the `sub` instruction interprets the left register with code 31 as the
  // stack pointer and not the zero register. (So does the `immediate` form.) In
  // the other form, `shifted register`, the register with code 31 is interpreted
  // as the zero register.
  return instruction->IsAdd() || instruction->IsSub();
}
| 341 | |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 342 | } // namespace helpers |
| 343 | } // namespace arm64 |
| 344 | } // namespace art |
| 345 | |
| 346 | #endif // ART_COMPILER_OPTIMIZING_COMMON_ARM64_H_ |