/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::InputRegisterAt;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

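// Builds a MemOperand for an absolute address: `location` holds a raw 64-bit
// pointer in a core (X) register, and `offset` is a byte displacement from it.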
ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

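// Copies the value left in the ARM64 return register (w0/x0 for core types,
// s0/d0 for floating-point types) into the target location `trg`.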
static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke)
      : SlowPathCodeARM64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

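// Returns true if the invoke was recognized as an intrinsic and a location
// summary was successfully built for it.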
bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

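// An Fmov between a core register and an FP register of the same width is a
// raw bit copy, which is exactly the semantics required by
// Double.doubleToRawLongBits / Float.floatToRawIntBits and their inverses.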
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
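      // Rev16 byte-swaps each 16-bit half; Sxth then sign-extends the low
      // half, as a Java short result must be sign-extended to 32 bits.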
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

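  // AArch64 has no count-trailing-zeros instruction; synthesize it as
  // CLZ(RBIT(x)): bit-reverse the value, then count leading zeros.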
  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenBitCount(HInvoke* instr, Primitive::Type type, vixl::MacroAssembler* masm) {
  DCHECK(Primitive::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
  DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);

  UseScratchRegisterScope temps(masm);

  Register src = InputRegisterAt(instr, 0);
  Register dst = RegisterFrom(instr->GetLocations()->Out(), type);
  FPRegister fpr = (type == Primitive::kPrimLong) ? temps.AcquireD() : temps.AcquireS();

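  // There is no general-purpose popcount instruction on AArch64, so do the
  // count in a SIMD/FP register: CNT counts set bits per byte, and ADDV sums
  // the byte counts into the lowest lane.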
  __ Fmov(fpr, src);
  __ Cnt(fpr.V8B(), fpr.V8B());
  __ Addv(fpr.B(), fpr.V8B());
  __ Fmov(dst, fpr);
}

void IntrinsicLocationsBuilderARM64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimInt, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

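  // Conditional negate: out = (in < 0) ? -in : in. As with Java's Math.abs,
  // the most negative value maps to itself under two's-complement negation.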
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

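  // Branch-free min/max: compare the operands, then CSEL selects op1 when the
  // condition holds (lt for min, gt for max) and op2 otherwise.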
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

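  // Java's Math.round(x) is defined as floor(x + 0.5): add 0.5, then convert
  // with round-toward-minus-infinity (Fcvtms). The locations builders below
  // only intrinsify this when kRoundIsPlusPointFive holds (see intrinsics.h).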
  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

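// Thread.currentThread() is a single load: the managed peer object is read
// from the runtime Thread* held in the dedicated thread register (tr).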
| 756 | void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) { |
| 757 | codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()), |
| 758 | MemOperand(tr, Thread::PeerOffset<8>().Int32Value())); |
| 759 | } |
| 760 | |
| 761 | static void GenUnsafeGet(HInvoke* invoke, |
| 762 | Primitive::Type type, |
| 763 | bool is_volatile, |
| 764 | CodeGeneratorARM64* codegen) { |
| 765 | LocationSummary* locations = invoke->GetLocations(); |
| 766 | DCHECK((type == Primitive::kPrimInt) || |
| 767 | (type == Primitive::kPrimLong) || |
| 768 | (type == Primitive::kPrimNot)); |
| 769 | vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_; |
Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 770 | Location base_loc = locations->InAt(1); |
| 771 | Register base = WRegisterFrom(base_loc); // Object pointer. |
| 772 | Location offset_loc = locations->InAt(2); |
| 773 | Register offset = XRegisterFrom(offset_loc); // Long offset. |
| 774 | Location trg_loc = locations->Out(); |
| 775 | Register trg = RegisterFrom(trg_loc, type); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 776 | |
Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 777 | if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) { |
| 778 | // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case. |
| 779 | UseScratchRegisterScope temps(masm); |
| 780 | Register temp = temps.AcquireW(); |
Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 781 | codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke, |
| 782 | trg_loc, |
| 783 | base, |
| 784 | /* offset */ 0U, |
| 785 | /* index */ offset_loc, |
| 786 | /* scale_factor */ 0U, |
| 787 | temp, |
| 788 | /* needs_null_check */ false, |
| 789 | is_volatile); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 790 | } else { |
Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 791 | // Other cases. |
| 792 | MemOperand mem_op(base.X(), offset); |
| 793 | if (is_volatile) { |
Serban Constantinescu | 4a6a67c | 2016-01-27 09:19:56 +0000 | [diff] [blame] | 794 | codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true); |
Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 795 | } else { |
| 796 | codegen->Load(type, trg, mem_op); |
| 797 | } |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 798 | |
Roland Levillain | 4401586 | 2016-01-22 11:47:17 +0000 | [diff] [blame] | 799 | if (type == Primitive::kPrimNot) { |
| 800 | DCHECK(trg.IsW()); |
| 801 | codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc); |
| 802 | } |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 803 | } |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 804 | } |
| 805 | |
| 806 | static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) { |
Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 807 | bool can_call = kEmitCompilerReadBarrier && |
| 808 | (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject || |
| 809 | invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 810 | LocationSummary* locations = new (arena) LocationSummary(invoke, |
Roland Levillain | 22ccc3a | 2015-11-24 13:10:05 +0000 | [diff] [blame] | 811 | can_call ? |
| 812 | LocationSummary::kCallOnSlowPath : |
| 813 | LocationSummary::kNoCall, |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 814 | kIntrinsified); |
| 815 | locations->SetInAt(0, Location::NoLocation()); // Unused receiver. |
| 816 | locations->SetInAt(1, Location::RequiresRegister()); |
| 817 | locations->SetInAt(2, Location::RequiresRegister()); |
Roland Levillain | bfea335 | 2016-06-23 13:48:47 +0100 | [diff] [blame] | 818 | locations->SetOut(Location::RequiresRegister(), |
| 819 | can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 820 | } |
| 821 | |
| 822 | void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) { |
| 823 | CreateIntIntIntToIntLocations(arena_, invoke); |
| 824 | } |
| 825 | void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) { |
| 826 | CreateIntIntIntToIntLocations(arena_, invoke); |
| 827 | } |
| 828 | void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) { |
| 829 | CreateIntIntIntToIntLocations(arena_, invoke); |
| 830 | } |
| 831 | void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { |
| 832 | CreateIntIntIntToIntLocations(arena_, invoke); |
| 833 | } |
| 834 | void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) { |
| 835 | CreateIntIntIntToIntLocations(arena_, invoke); |
| 836 | } |
| 837 | void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { |
| 838 | CreateIntIntIntToIntLocations(arena_, invoke); |
| 839 | } |
| 840 | |
| 841 | void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 842 | GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 843 | } |
| 844 | void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 845 | GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 846 | } |
| 847 | void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 848 | GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 849 | } |
| 850 | void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 851 | GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 852 | } |
| 853 | void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 854 | GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 855 | } |
| 856 | void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 857 | GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 858 | } |
| 859 | |
| 860 | static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) { |
| 861 | LocationSummary* locations = new (arena) LocationSummary(invoke, |
| 862 | LocationSummary::kNoCall, |
| 863 | kIntrinsified); |
| 864 | locations->SetInAt(0, Location::NoLocation()); // Unused receiver. |
| 865 | locations->SetInAt(1, Location::RequiresRegister()); |
| 866 | locations->SetInAt(2, Location::RequiresRegister()); |
| 867 | locations->SetInAt(3, Location::RequiresRegister()); |
| 868 | } |
| 869 | |
| 870 | void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) { |
| 871 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 872 | } |
| 873 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) { |
| 874 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 875 | } |
| 876 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) { |
| 877 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 878 | } |
| 879 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) { |
| 880 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 881 | } |
| 882 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { |
| 883 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 884 | } |
| 885 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { |
| 886 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 887 | } |
| 888 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) { |
| 889 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 890 | } |
| 891 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) { |
| 892 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 893 | } |
| 894 | void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) { |
| 895 | CreateIntIntIntIntToVoid(arena_, invoke); |
| 896 | } |
| 897 | |
| 898 | static void GenUnsafePut(LocationSummary* locations, |
| 899 | Primitive::Type type, |
| 900 | bool is_volatile, |
| 901 | bool is_ordered, |
| 902 | CodeGeneratorARM64* codegen) { |
| 903 | vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_; |
| 904 | |
| 905 | Register base = WRegisterFrom(locations->InAt(1)); // Object pointer. |
| 906 | Register offset = XRegisterFrom(locations->InAt(2)); // Long offset. |
| 907 | Register value = RegisterFrom(locations->InAt(3), type); |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 908 | Register source = value; |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 909 | MemOperand mem_op(base.X(), offset); |
| 910 | |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 911 | { |
| 912 | // We use a block to end the scratch scope before the write barrier, thus |
| 913 | // freeing the temporary registers so they can be used in `MarkGCCard`. |
| 914 | UseScratchRegisterScope temps(masm); |
| 915 | |
| 916 | if (kPoisonHeapReferences && type == Primitive::kPrimNot) { |
| 917 | DCHECK(value.IsW()); |
| 918 | Register temp = temps.AcquireW(); |
| 919 | __ Mov(temp.W(), value.W()); |
| 920 | codegen->GetAssembler()->PoisonHeapReference(temp.W()); |
| 921 | source = temp; |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 922 | } |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 923 | |
| 924 | if (is_volatile || is_ordered) { |
Serban Constantinescu | 4a6a67c | 2016-01-27 09:19:56 +0000 | [diff] [blame] | 925 | codegen->StoreRelease(type, source, mem_op); |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 926 | } else { |
| 927 | codegen->Store(type, source, mem_op); |
| 928 | } |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 929 | } |
| 930 | |
| 931 | if (type == Primitive::kPrimNot) { |
Nicolas Geoffray | 07276db | 2015-05-18 14:22:09 +0100 | [diff] [blame] | 932 | bool value_can_be_null = true; // TODO: Worth finding out this information? |
| 933 | codegen->MarkGCCard(base, value, value_can_be_null); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 934 | } |
| 935 | } |
| 936 | |
| 937 | void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 938 | GenUnsafePut(invoke->GetLocations(), |
| 939 | Primitive::kPrimInt, |
| 940 | /* is_volatile */ false, |
| 941 | /* is_ordered */ false, |
| 942 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 943 | } |
| 944 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 945 | GenUnsafePut(invoke->GetLocations(), |
| 946 | Primitive::kPrimInt, |
| 947 | /* is_volatile */ false, |
| 948 | /* is_ordered */ true, |
| 949 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 950 | } |
| 951 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 952 | GenUnsafePut(invoke->GetLocations(), |
| 953 | Primitive::kPrimInt, |
| 954 | /* is_volatile */ true, |
| 955 | /* is_ordered */ false, |
| 956 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 957 | } |
| 958 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 959 | GenUnsafePut(invoke->GetLocations(), |
| 960 | Primitive::kPrimNot, |
| 961 | /* is_volatile */ false, |
| 962 | /* is_ordered */ false, |
| 963 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 964 | } |
| 965 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 966 | GenUnsafePut(invoke->GetLocations(), |
| 967 | Primitive::kPrimNot, |
| 968 | /* is_volatile */ false, |
| 969 | /* is_ordered */ true, |
| 970 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 971 | } |
| 972 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 973 | GenUnsafePut(invoke->GetLocations(), |
| 974 | Primitive::kPrimNot, |
| 975 | /* is_volatile */ true, |
| 976 | /* is_ordered */ false, |
| 977 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 978 | } |
| 979 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 980 | GenUnsafePut(invoke->GetLocations(), |
| 981 | Primitive::kPrimLong, |
| 982 | /* is_volatile */ false, |
| 983 | /* is_ordered */ false, |
| 984 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 985 | } |
| 986 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 987 | GenUnsafePut(invoke->GetLocations(), |
| 988 | Primitive::kPrimLong, |
| 989 | /* is_volatile */ false, |
| 990 | /* is_ordered */ true, |
| 991 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 992 | } |
| 993 | void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) { |
Roland Levillain | bf84a3d | 2015-12-04 14:33:02 +0000 | [diff] [blame] | 994 | GenUnsafePut(invoke->GetLocations(), |
| 995 | Primitive::kPrimLong, |
| 996 | /* is_volatile */ true, |
| 997 | /* is_ordered */ false, |
| 998 | codegen_); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 999 | } |
| 1000 | |
Roland Levillain | 2e50ecb | 2016-01-27 14:08:33 +0000 | [diff] [blame] | 1001 | static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, |
| 1002 | HInvoke* invoke, |
| 1003 | Primitive::Type type) { |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 1004 | LocationSummary* locations = new (arena) LocationSummary(invoke, |
| 1005 | LocationSummary::kNoCall, |
| 1006 | kIntrinsified); |
| 1007 | locations->SetInAt(0, Location::NoLocation()); // Unused receiver. |
| 1008 | locations->SetInAt(1, Location::RequiresRegister()); |
| 1009 | locations->SetInAt(2, Location::RequiresRegister()); |
| 1010 | locations->SetInAt(3, Location::RequiresRegister()); |
| 1011 | locations->SetInAt(4, Location::RequiresRegister()); |
| 1012 | |
Roland Levillain | 2e50ecb | 2016-01-27 14:08:33 +0000 | [diff] [blame] | 1013 | // If heap poisoning is enabled, we don't want the unpoisoning |
| 1014 | // operations to potentially clobber the output. |
| 1015 | Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot) |
| 1016 | ? Location::kOutputOverlap |
| 1017 | : Location::kNoOutputOverlap; |
| 1018 | locations->SetOut(Location::RequiresRegister(), overlaps); |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 1019 | } |
| 1020 | |
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                         // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);         // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- value));
  // result = (tmp_value == 0);

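  // The loop below uses ARMv8 exclusive monitors: Ldaxr is a load-acquire
  // exclusive; Stlxr is a store-release exclusive that writes 0 to its
  // status register (tmp_32) on success and 1 if the exclusive monitor was
  // lost, in which case Cbnz retries the whole sequence.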
  vixl::Label loop_head, exit_loop;
  __ Bind(&loop_head);
  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
  __ Cmp(tmp_value, expected);
  __ B(&exit_loop, ne);
  __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
  __ Cbnz(tmp_32, &loop_head);
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not unpoison `value`, as it is the same register as
      // `expected`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value);
    }
  }
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCas).
  //
  // TODO(rpl): Implement read barrier support in GenCas and re-enable
  // this intrinsic.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCas).
  //
  // TODO(rpl): Implement read barrier support in GenCas and re-enable
  // this intrinsic.
  DCHECK(!kEmitCompilerReadBarrier);

  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            invoke->InputAt(1)->CanBeNull()
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = XRegisterFrom(locations->InAt(0));
  Register arg = XRegisterFrom(locations->InAt(1));
  Register out = OutputRegister(invoke);

  Register temp0 = WRegisterFrom(locations->GetTemp(0));
  Register temp1 = WRegisterFrom(locations->GetTemp(1));
  Register temp2 = WRegisterFrom(locations->GetTemp(2));

  vixl::Label loop;
  vixl::Label find_char_diff;
  vixl::Label end;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take the slow path and throw if the argument can be, and is, null.
  SlowPathCodeARM64* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ Subs(out, str, arg);
  __ B(&end, eq);
  // Load lengths of this and argument strings.
  __ Ldr(temp0, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Return zero if both strings are empty.
  __ Orr(out, temp0, temp1);
  __ Cbz(out, &end);
  // out = length diff.
  __ Subs(out, temp0, temp1);
  // temp2 = min(len(str), len(arg)).
  __ Csel(temp2, temp1, temp0, ge);
  // Shorter string is empty?
  __ Cbz(temp2, &end);

  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);

  UseScratchRegisterScope scratch_scope(masm);
  Register temp4 = scratch_scope.AcquireX();

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Promote temp0 to an X reg, ready for LDR.
  temp0 = temp0.X();

  // Loop to compare 4x16-bit characters at a time (ok because of string data alignment).
  __ Bind(&loop);
  __ Ldr(temp4, MemOperand(str.X(), temp1));
  __ Ldr(temp0, MemOperand(arg.X(), temp1));
  __ Cmp(temp4, temp0);
  __ B(ne, &find_char_diff);
  __ Add(temp1, temp1, char_size * 4);
  __ Subs(temp2, temp2, 4);
  __ B(gt, &loop);
  __ B(&end);

  // Promote temp1 to an X reg, ready for EOR.
  temp1 = temp1.X();

  // Find the single 16-bit character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ Eor(temp1, temp0, temp4);
  __ Rbit(temp1, temp1);
  __ Clz(temp1, temp1);
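  // Worked example: if the first differing character is at index 2 of the
  // current 4-char block, the EOR result has its lowest set bit somewhere in
  // bits [32, 47]; Rbit+Clz turns that into a bit position in [32, 47], and
  // the LSR #4 below converts it to the character index 2.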
  // If the number of 16-bit chars remaining <= the index where the difference occurs (0-3), then
  // the difference occurs outside the remaining string data, so just return length diff (out).
  __ Cmp(temp2, Operand(temp1, LSR, 4));
  __ B(le, &end);
  // Extract the characters and calculate the difference.
  __ Bic(temp1, temp1, 0xf);
  __ Lsr(temp0, temp0, temp1);
  __ Lsr(temp4, temp4, temp1);
  __ And(temp4, temp4, 0xffff);
  __ Sub(out, temp4, Operand(temp0, UXTH));

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Cbz(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Ldr(temp, MemOperand(str.X(), class_offset));
    __ Ldr(temp1, MemOperand(arg.X(), class_offset));
    __ Cmp(temp, temp1);
    __ B(&return_false, ne);
  }

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCodeARM64* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Tst(char_reg, 0xFFFF0000);
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(ne, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    Register tmp_reg = WRegisterFrom(locations->GetTemp(0));
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

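// For reference (illustrative): String.indexOf(int ch) uses the
// start_at_zero == true path above, while String.indexOf(int ch, int fromIndex)
// passes fromIndex through in the third runtime argument register and uses
// start_at_zero == false.
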
void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

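// The three StringFactory stubs above share one pattern: null-check the
// reference argument via a slow path where needed (newStringFromChars relies
// on its callers' checks instead), load the entrypoint from the thread
// register (tr), call it with Blr, and record the PC so the runtime can find
// the stack map for this call site.
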
static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(1)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void GenFPToFPCall(HInvoke* invoke,
                          vixl::MacroAssembler* masm,
                          CodeGeneratorARM64* codegen,
                          QuickEntrypointEnum entry) {
  __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64WordSize>(entry).Int32Value()));
  __ Blr(lr);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
}

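// GenFPToFPCall can omit any register shuffling because the locations above
// pin the arguments and the result to the runtime FP calling convention
// (per AAPCS64: first argument and result in d0, second argument in d1), so
// the values are already where the libm-backed entrypoints expect them.
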
void IntrinsicLocationsBuilderARM64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = XRegisterFrom(locations->InAt(0));
  Register srcBegin = XRegisterFrom(locations->InAt(1));
  Register srcEnd = XRegisterFrom(locations->InAt(2));
  Register dstObj = XRegisterFrom(locations->InAt(3));
  Register dstBegin = XRegisterFrom(locations->InAt(4));

  Register src_ptr = XRegisterFrom(locations->GetTemp(0));
  Register num_chr = XRegisterFrom(locations->GetTemp(1));
  Register tmp1 = XRegisterFrom(locations->GetTemp(2));

  UseScratchRegisterScope temps(masm);
  Register dst_ptr = temps.AcquireX();
  Register tmp2 = temps.AcquireX();

  // Compute the source address to copy from.
  __ Add(src_ptr, srcObj, Operand(value_offset));
  __ Add(src_ptr, src_ptr, Operand(srcBegin, LSL, 1));

  // Compute the destination address to copy to.
  __ Add(dst_ptr, dstObj, Operand(data_offset));
  __ Add(dst_ptr, dst_ptr, Operand(dstBegin, LSL, 1));

  __ Sub(num_chr, srcEnd, srcBegin);

  // Do the copy.
  vixl::Label loop;
  vixl::Label done;
  vixl::Label remainder;

  // Early out for valid zero-length retrievals.
  __ Cbz(num_chr, &done);

  // Save repairing the value of num_chr on the < 8 character path.
  __ Subs(tmp1, num_chr, 8);
  __ B(lt, &remainder);

  // Keep the result of the earlier subs, we are going to fetch at least 8 characters.
  __ Mov(num_chr, tmp1);

  // Main loop used for longer fetches loads and stores 8x16-bit characters at a time.
  // (Unaligned addresses are acceptable here and not worth inlining extra code to rectify.)
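  // Ldp/Stp with vixl::PostIndex access 16 bytes at the current pointer and
  // then advance it by char_size * 8, so each iteration moves one 8-char
  // block and leaves both pointers ready for the next block.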
  __ Bind(&loop);
  __ Ldp(tmp1, tmp2, MemOperand(src_ptr, char_size * 8, vixl::PostIndex));
  __ Subs(num_chr, num_chr, 8);
  __ Stp(tmp1, tmp2, MemOperand(dst_ptr, char_size * 8, vixl::PostIndex));
  __ B(ge, &loop);

  __ Adds(num_chr, num_chr, 8);
  __ B(eq, &done);

  // Main loop for < 8 character case and remainder handling. Loads and stores one
  // 16-bit Java character at a time.
  __ Bind(&remainder);
  __ Ldrh(tmp1, MemOperand(src_ptr, char_size, vixl::PostIndex));
  __ Subs(num_chr, num_chr, 1);
  __ Strh(tmp1, MemOperand(dst_ptr, char_size, vixl::PostIndex));
  __ B(gt, &remainder);

  __ Bind(&done);
}

// Mirrors ARRAYCOPY_SHORT_CHAR_ARRAY_THRESHOLD in libcore, so we can choose to use the native
// implementation there for longer copy lengths.
static constexpr int32_t kSystemArrayCopyCharThreshold = 32;

static void SetSystemArrayCopyLocationRequires(LocationSummary* locations,
                                               uint32_t at,
                                               HInstruction* input) {
  HIntConstant* const_input = input->AsIntConstant();
  if (const_input != nullptr && !vixl::Assembler::IsImmAddSub(const_input->GetValue())) {
    locations->SetInAt(at, Location::RequiresRegister());
  } else {
    locations->SetInAt(at, Location::RegisterOrConstant(input));
  }
}

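// IsImmAddSub() checks whether the constant fits the A64 add/sub immediate
// encoding (a 12-bit value, optionally shifted left by 12); anything else has
// to be materialized in a register first.
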
void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dst_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dst_pos != nullptr && dst_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // The length must be >= 0 and not so long that we would (currently) prefer libcore's
  // native implementation.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len > kSystemArrayCopyCharThreshold) {
      // Just call as normal.
      return;
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(char[] src, int src_pos, char[] dst, int dst_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

static void CheckSystemArrayCopyPosition(vixl::MacroAssembler* masm,
                                         const Location& pos,
                                         const Register& input,
                                         const Location& length,
                                         SlowPathCodeARM64* slow_path,
                                         const Register& input_len,
                                         const Register& temp,
                                         bool length_is_input_length = false) {
  const int32_t length_offset = mirror::Array::LengthOffset().Int32Value();
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ Ldr(temp, MemOperand(input, length_offset));
        __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
        __ B(slow_path->GetEntryLabel(), lt);
      }
    } else {
      // Check that length(input) >= pos.
      __ Ldr(input_len, MemOperand(input, length_offset));
      __ Subs(temp, input_len, pos_const);
      __ B(slow_path->GetEntryLabel(), lt);

      // Check that (length(input) - pos) >= length.
      __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
      __ B(slow_path->GetEntryLabel(), lt);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    __ Cbnz(WRegisterFrom(pos), slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = WRegisterFrom(pos);
    __ Tbnz(pos_reg, pos_reg.size() - 1, slow_path->GetEntryLabel());

    // Check that pos <= length(input) && (length(input) - pos) >= length.
    __ Ldr(temp, MemOperand(input, length_offset));
    __ Subs(temp, temp, pos_reg);
    // Ccmp if length(input) >= pos, else definitely bail to slow path (N!=V == lt).
    __ Ccmp(temp, OperandFrom(length, Primitive::kPrimInt), NFlag, ge);
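    // (Ccmp semantics: if the `ge` condition holds from the Subs above, the
    // flags are set as by Cmp(temp, length); otherwise NZCV is forced to
    // NFlag, i.e. N=1 and Z=C=V=0, which makes the `lt` branch below taken.)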
    __ B(slow_path->GetEntryLabel(), lt);
  }
}

// Compute base source address, base destination address, and end source address
// for System.arraycopy* intrinsics.
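// In effect: src_base = src + data_offset + src_pos * element_size,
// dst_base = dst + data_offset + dst_pos * element_size, and
// src_end = src_base + copy_length * element_size.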
static void GenSystemArrayCopyAddresses(vixl::MacroAssembler* masm,
                                        Primitive::Type type,
                                        const Register& src,
                                        const Location& src_pos,
                                        const Register& dst,
                                        const Location& dst_pos,
                                        const Location& copy_length,
                                        const Register& src_base,
                                        const Register& dst_base,
                                        const Register& src_end) {
  DCHECK(type == Primitive::kPrimNot || type == Primitive::kPrimChar)
      << "Unexpected element type: " << type;
  const int32_t element_size = Primitive::ComponentSize(type);
  const int32_t element_size_shift = Primitive::ComponentSizeShift(type);

  uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_base, src, element_size * constant + data_offset);
  } else {
    __ Add(src_base, src, data_offset);
    __ Add(src_base, src_base, Operand(XRegisterFrom(src_pos), LSL, element_size_shift));
  }

  if (dst_pos.IsConstant()) {
    int32_t constant = dst_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(dst_base, dst, element_size * constant + data_offset);
  } else {
    __ Add(dst_base, dst, data_offset);
    __ Add(dst_base, dst_base, Operand(XRegisterFrom(dst_pos), LSL, element_size_shift));
  }

  if (copy_length.IsConstant()) {
    int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_end, src_base, element_size * constant);
  } else {
    __ Add(src_end, src_base, Operand(XRegisterFrom(copy_length), LSL, element_size_shift));
  }
}

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();
  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dst = XRegisterFrom(locations->InAt(2));
  Location dst_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  // If source and destination are the same, take the slow path. Overlapping copy regions must be
  // copied in reverse and we can't know in all cases if it's needed.
  __ Cmp(src, dst);
  __ B(slow_path->GetEntryLabel(), eq);

  // Bail out if the source is null.
  __ Cbz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Cbz(dst, slow_path->GetEntryLabel());

  if (!length.IsConstant()) {
    // If the length is negative, bail out.
    __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel());
    // If the length is > 32, then (currently) prefer libcore's native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyCharThreshold);
    __ B(slow_path->GetEntryLabel(), gt);
  } else {
    // We have already checked in the LocationsBuilder for the constant case.
    DCHECK_GE(length.GetConstant()->AsIntConstant()->GetValue(), 0);
    DCHECK_LE(length.GetConstant()->AsIntConstant()->GetValue(), 32);
  }

  Register src_curr_addr = WRegisterFrom(locations->GetTemp(0));
  Register dst_curr_addr = WRegisterFrom(locations->GetTemp(1));
  Register src_stop_addr = WRegisterFrom(locations->GetTemp(2));

  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               src_curr_addr,
                               dst_curr_addr,
                               /* length_is_input_length */ false);

  CheckSystemArrayCopyPosition(masm,
                               dst_pos,
                               dst,
                               length,
                               slow_path,
                               src_curr_addr,
                               dst_curr_addr,
                               /* length_is_input_length */ false);

  src_curr_addr = src_curr_addr.X();
  dst_curr_addr = dst_curr_addr.X();
  src_stop_addr = src_stop_addr.X();

  GenSystemArrayCopyAddresses(masm,
                              Primitive::kPrimChar,
                              src,
                              src_pos,
                              dst,
                              dst_pos,
                              length,
                              src_curr_addr,
                              dst_curr_addr,
                              src_stop_addr);

  // Iterate over the arrays and do a raw copy of the chars.
  const int32_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  UseScratchRegisterScope temps(masm);
  Register tmp = temps.AcquireW();
  vixl::Label loop, done;
  __ Bind(&loop);
  __ Cmp(src_curr_addr, src_stop_addr);
  __ B(&done, eq);
  __ Ldrh(tmp, MemOperand(src_curr_addr, char_size, vixl::PostIndex));
  __ Strh(tmp, MemOperand(dst_curr_addr, char_size, vixl::PostIndex));
  __ B(&loop);
  __ Bind(&done);

  __ Bind(slow_path->GetExitLabel());
}

donghui.bai | c2ec9ad | 2016-03-10 14:02:55 +0800 | [diff] [blame] | 2013 | // We can choose to use the native implementation there for longer copy lengths. |
| 2014 | static constexpr int32_t kSystemArrayCopyThreshold = 128; |
| 2015 | |
| 2016 | // CodeGenerator::CreateSystemArrayCopyLocationSummary use three temporary registers. |
| 2017 | // We want to use two temporary registers in order to reduce the register pressure in arm64. |
| 2018 | // So we don't use the CodeGenerator::CreateSystemArrayCopyLocationSummary. |
| 2019 | void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopy(HInvoke* invoke) { |
Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 2020 | // TODO(rpl): Implement read barriers in the SystemArrayCopy |
| 2021 | // intrinsic and re-enable it (b/29516905). |
| 2022 | if (kEmitCompilerReadBarrier) { |
| 2023 | return; |
| 2024 | } |
| 2025 | |
donghui.bai | c2ec9ad | 2016-03-10 14:02:55 +0800 | [diff] [blame] | 2026 | // Check to see if we have known failures that will cause us to have to bail out |
| 2027 | // to the runtime, and just generate the runtime call directly. |
| 2028 | HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant(); |
| 2029 | HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant(); |
| 2030 | |
| 2031 | // The positions must be non-negative. |
| 2032 | if ((src_pos != nullptr && src_pos->GetValue() < 0) || |
| 2033 | (dest_pos != nullptr && dest_pos->GetValue() < 0)) { |
| 2034 | // We will have to fail anyway. |
| 2035 | return; |
| 2036 | } |
| 2037 | |
| 2038 | // The length must be >= 0 (and, for the intrinsic path, below the threshold). |
| 2039 | HIntConstant* length = invoke->InputAt(4)->AsIntConstant(); |
| 2040 | if (length != nullptr) { |
| 2041 | int32_t len = length->GetValue(); |
| 2042 | if (len < 0 || len >= kSystemArrayCopyThreshold) { |
| 2043 | // Just call as normal. |
| 2044 | return; |
| 2045 | } |
| 2046 | } |
| 2047 | |
| 2048 | SystemArrayCopyOptimizations optimizations(invoke); |
| 2049 | |
| 2050 | if (optimizations.GetDestinationIsSource()) { |
| 2051 | if (src_pos != nullptr && dest_pos != nullptr && src_pos->GetValue() < dest_pos->GetValue()) { |
| 2052 | // If source and destination are the same, we only support copying backward (dest_pos <= src_pos). |
| 2053 | return; |
| 2054 | } |
| 2055 | } |
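| | // Example: with src == dest, copying two elements forward from position 0 to |
| | // position 1 would overwrite element 1 before it is read, so such copies must |
| | // fall back to the runtime. |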
| 2056 | |
| 2057 | if (optimizations.GetDestinationIsPrimitiveArray() || optimizations.GetSourceIsPrimitiveArray()) { |
| 2058 | // We currently don't intrinsify primitive copying. |
| 2059 | return; |
| 2060 | } |
| 2061 | |
| 2062 | ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena(); |
| 2063 | LocationSummary* locations = new (allocator) LocationSummary(invoke, |
| 2064 | LocationSummary::kCallOnSlowPath, |
| 2065 | kIntrinsified); |
| 2066 | // arraycopy(Object src, int src_pos, Object dest, int dest_pos, int length). |
| 2067 | locations->SetInAt(0, Location::RequiresRegister()); |
| 2068 | SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1)); |
| 2069 | locations->SetInAt(2, Location::RequiresRegister()); |
| 2070 | SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3)); |
| 2071 | SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4)); |
| 2072 | |
| 2073 | locations->AddTemp(Location::RequiresRegister()); |
| 2074 | locations->AddTemp(Location::RequiresRegister()); |
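| | // Only two fixed temps are requested; the code generator acquires any extra |
| | // scratch register (e.g. the third address register) from the VIXL temp pool. |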
| 2075 | } |
| 2076 | |
| 2077 | void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopy(HInvoke* invoke) { |
Roland Levillain | 3d31242 | 2016-06-23 13:53:42 +0100 | [diff] [blame] | 2078 | // TODO(rpl): Implement read barriers in the SystemArrayCopy |
| 2079 | // intrinsic and re-enable it (b/29516905). |
| 2080 | DCHECK(!kEmitCompilerReadBarrier); |
| 2081 | |
donghui.bai | c2ec9ad | 2016-03-10 14:02:55 +0800 | [diff] [blame] | 2082 | vixl::MacroAssembler* masm = GetVIXLAssembler(); |
| 2083 | LocationSummary* locations = invoke->GetLocations(); |
| 2084 | |
| 2085 | uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); |
| 2086 | uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value(); |
| 2087 | uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value(); |
| 2088 | uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value(); |
| 2089 | |
| 2090 | Register src = XRegisterFrom(locations->InAt(0)); |
| 2091 | Location src_pos = locations->InAt(1); |
| 2092 | Register dest = XRegisterFrom(locations->InAt(2)); |
| 2093 | Location dest_pos = locations->InAt(3); |
| 2094 | Location length = locations->InAt(4); |
| 2095 | Register temp1 = WRegisterFrom(locations->GetTemp(0)); |
| 2096 | Register temp2 = WRegisterFrom(locations->GetTemp(1)); |
| 2097 | |
| 2098 | SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke); |
| 2099 | codegen_->AddSlowPath(slow_path); |
| 2100 | |
| 2101 | vixl::Label conditions_on_positions_validated; |
| 2102 | SystemArrayCopyOptimizations optimizations(invoke); |
| 2103 | |
donghui.bai | c2ec9ad | 2016-03-10 14:02:55 +0800 | [diff] [blame] | 2104 | // If source and destination are the same, we go to the slow path if we would |
| 2105 | // need to do a forward (overlapping) copy. |
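| | // Three shapes are handled below: both positions constant (decided at compile |
| | // time), src_pos constant with dest_pos in a register, and src_pos in a |
| | // register. Each shape falls through once the copy is known to be safe. |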
| 2106 | if (src_pos.IsConstant()) { |
| 2107 | int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue(); |
| 2108 | if (dest_pos.IsConstant()) { |
Nicolas Geoffray | 9f65db8 | 2016-07-07 12:07:42 +0100 | [diff] [blame^] | 2109 | int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue(); |
| 2110 | if (optimizations.GetDestinationIsSource()) { |
| 2111 | // Checked when building locations. |
| 2112 | DCHECK_GE(src_pos_constant, dest_pos_constant); |
| 2113 | } else if (src_pos_constant < dest_pos_constant) { |
| 2114 | __ Cmp(src, dest); |
| 2115 | __ B(slow_path->GetEntryLabel(), eq); |
| 2116 | } |
donghui.bai | c2ec9ad | 2016-03-10 14:02:55 +0800 | [diff] [blame] | 2117 | // Checked when building locations. |
| 2118 | DCHECK(!optimizations.GetDestinationIsSource() |
| 2119 | || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue())); |
| 2120 | } else { |
| 2121 | if (!optimizations.GetDestinationIsSource()) { |
Nicolas Geoffray | 9f65db8 | 2016-07-07 12:07:42 +0100 | [diff] [blame^] | 2122 | __ Cmp(src, dest); |
donghui.bai | c2ec9ad | 2016-03-10 14:02:55 +0800 | [diff] [blame] | 2123 | __ B(&conditions_on_positions_validated, ne); |
| 2124 | } |
| 2125 | __ Cmp(WRegisterFrom(dest_pos), src_pos_constant); |
| 2126 | __ B(slow_path->GetEntryLabel(), gt); |
| 2127 | } |
| 2128 | } else { |
| 2129 | if (!optimizations.GetDestinationIsSource()) { |
Nicolas Geoffray | 9f65db8 | 2016-07-07 12:07:42 +0100 | [diff] [blame^] | 2130 | __ Cmp(src, dest); |
donghui.bai | c2ec9ad | 2016-03-10 14:02:55 +0800 | [diff] [blame] | 2131 | __ B(&conditions_on_positions_validated, ne); |
| 2132 | } |
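| | // Possibly overlapping arrays: bail out if src_pos < dest_pos, as the copy |
| | // loop below only runs forward. |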
| 2133 | __ Cmp(RegisterFrom(src_pos, invoke->InputAt(1)->GetType()), |
| 2134 | OperandFrom(dest_pos, invoke->InputAt(3)->GetType())); |
| 2135 | __ B(slow_path->GetEntryLabel(), lt); |
| 2136 | } |
| 2137 | |
| 2138 | __ Bind(&conditions_on_positions_validated); |
| 2139 | |
| 2140 | if (!optimizations.GetSourceIsNotNull()) { |
| 2141 | // Bail out if the source is null. |
| 2142 | __ Cbz(src, slow_path->GetEntryLabel()); |
| 2143 | } |
| 2144 | |
| 2145 | if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) { |
| 2146 | // Bail out if the destination is null. |
| 2147 | __ Cbz(dest, slow_path->GetEntryLabel()); |
| 2148 | } |
| 2149 | |
| 2150 | // We have already checked in the LocationsBuilder for the constant case. |
| 2151 | if (!length.IsConstant() && |
| 2152 | !optimizations.GetCountIsSourceLength() && |
| 2153 | !optimizations.GetCountIsDestinationLength()) { |
| 2154 | // If the length is negative, bail out. |
| 2155 | __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel()); |
| 2156 | // If the length is >= 128, then (currently) prefer the native implementation. |
| 2157 | __ Cmp(WRegisterFrom(length), kSystemArrayCopyThreshold); |
| 2158 | __ B(slow_path->GetEntryLabel(), ge); |
| 2159 | } |
| 2160 | // Validity checks: source. |
| 2161 | CheckSystemArrayCopyPosition(masm, |
| 2162 | src_pos, |
| 2163 | src, |
| 2164 | length, |
| 2165 | slow_path, |
| 2166 | temp1, |
| 2167 | temp2, |
| 2168 | optimizations.GetCountIsSourceLength()); |
| 2169 | |
| 2170 | // Validity checks: dest. |
| 2171 | CheckSystemArrayCopyPosition(masm, |
| 2172 | dest_pos, |
| 2173 | dest, |
| 2174 | length, |
| 2175 | slow_path, |
| 2176 | temp1, |
| 2177 | temp2, |
| 2178 | optimizations.GetCountIsDestinationLength()); |
| 2179 | { |
| 2180 | // We use a block to end the scratch scope before the write barrier, thus |
| 2181 | // freeing the temporary registers so they can be used in `MarkGCCard`. |
| 2182 | UseScratchRegisterScope temps(masm); |
| 2183 | Register temp3 = temps.AcquireW(); |
| 2184 | if (!optimizations.GetDoesNotNeedTypeCheck()) { |
| 2185 | // Check whether all elements of the source array are assignable to the component |
| 2186 | // type of the destination array. We do two checks: the classes are the same, |
| 2187 | // or the destination is Object[]. If neither check succeeds, we go to the |
| 2188 | // slow path. |
| 2189 | __ Ldr(temp1, MemOperand(dest, class_offset)); |
| 2190 | __ Ldr(temp2, MemOperand(src, class_offset)); |
| 2191 | bool did_unpoison = false; |
| 2192 | if (!optimizations.GetDestinationIsNonPrimitiveArray() || |
| 2193 | !optimizations.GetSourceIsNonPrimitiveArray()) { |
| 2194 | // One or two of the references need to be unpoisoned. Unpoison them |
| 2195 | // both to make the identity check valid. |
| 2196 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1); |
| 2197 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2); |
| 2198 | did_unpoison = true; |
| 2199 | } |
| 2200 | |
| 2201 | if (!optimizations.GetDestinationIsNonPrimitiveArray()) { |
| 2202 | // Bail out if the destination is not a non-primitive array. |
| 2203 | // /* HeapReference<Class> */ temp3 = temp1->component_type_ |
| 2204 | __ Ldr(temp3, HeapOperand(temp1, component_offset)); |
| 2205 | __ Cbz(temp3, slow_path->GetEntryLabel()); |
| 2206 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3); |
| 2207 | __ Ldrh(temp3, HeapOperand(temp3, primitive_offset)); |
| 2208 | static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); |
| 2209 | __ Cbnz(temp3, slow_path->GetEntryLabel()); |
| 2210 | } |
| 2211 | |
| 2212 | if (!optimizations.GetSourceIsNonPrimitiveArray()) { |
| 2213 | // Bail out if the source is not a non-primitive array. |
| 2214 | // /* HeapReference<Class> */ temp3 = temp2->component_type_ |
| 2215 | __ Ldr(temp3, HeapOperand(temp2, component_offset)); |
| 2216 | __ Cbz(temp3, slow_path->GetEntryLabel()); |
| 2217 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3); |
| 2218 | __ Ldrh(temp3, HeapOperand(temp3, primitive_offset)); |
| 2219 | static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); |
| 2220 | __ Cbnz(temp3, slow_path->GetEntryLabel()); |
| 2221 | } |
| 2222 | |
| 2223 | __ Cmp(temp1, temp2); |
| 2224 | |
| 2225 | if (optimizations.GetDestinationIsTypedObjectArray()) { |
| 2226 | vixl::Label do_copy; |
| 2227 | __ B(&do_copy, eq); |
| 2228 | if (!did_unpoison) { |
| 2229 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1); |
| 2230 | } |
| 2231 | // /* HeapReference<Class> */ temp1 = temp1->component_type_ |
| 2232 | __ Ldr(temp1, HeapOperand(temp1, component_offset)); |
| 2233 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1); |
| 2234 | // /* HeapReference<Class> */ temp1 = temp1->super_class_ |
| 2235 | __ Ldr(temp1, HeapOperand(temp1, super_offset)); |
| 2236 | // No need to unpoison the result, we're comparing against null. |
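| | // java.lang.Object has no superclass, so the component type is Object exactly |
| | // when the loaded super_class_ is null; otherwise take the slow path. |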
| 2237 | __ Cbnz(temp1, slow_path->GetEntryLabel()); |
| 2238 | __ Bind(&do_copy); |
| 2239 | } else { |
| 2240 | __ B(slow_path->GetEntryLabel(), ne); |
| 2241 | } |
| 2242 | } else if (!optimizations.GetSourceIsNonPrimitiveArray()) { |
| 2243 | DCHECK(optimizations.GetDestinationIsNonPrimitiveArray()); |
| 2244 | // Bail out if the source is not a non-primitive array. |
| 2245 | // /* HeapReference<Class> */ temp1 = src->klass_ |
| 2246 | __ Ldr(temp1, HeapOperand(src.W(), class_offset)); |
| 2247 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1); |
| 2248 | // /* HeapReference<Class> */ temp3 = temp1->component_type_ |
| 2249 | __ Ldr(temp3, HeapOperand(temp1, component_offset)); |
| 2250 | __ Cbz(temp3, slow_path->GetEntryLabel()); |
| 2251 | codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3); |
| 2252 | __ Ldrh(temp3, HeapOperand(temp3, primitive_offset)); |
| 2253 | static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot"); |
| 2254 | __ Cbnz(temp3, slow_path->GetEntryLabel()); |
| 2255 | } |
| 2256 | |
| 2257 | Register src_curr_addr = temp1.X(); |
| 2258 | Register dst_curr_addr = temp2.X(); |
| 2259 | Register src_stop_addr = temp3.X(); |
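| | // The type checks are done, so temp1-temp3 can be reused (widened to X |
| | // registers) as the copy address registers. |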
| 2260 | |
| 2261 | GenSystemArrayCopyAddresses(masm, |
| 2262 | Primitive::kPrimNot, |
| 2263 | src, |
| 2264 | src_pos, |
| 2265 | dest, |
| 2266 | dest_pos, |
| 2267 | length, |
| 2268 | src_curr_addr, |
| 2269 | dst_curr_addr, |
| 2270 | src_stop_addr); |
| 2271 | |
| 2272 | // Iterate over the arrays and do a raw copy of the objects. We don't need to |
| 2273 | // poison/unpoison nor to emit any read barrier, as the next uses of the |
| 2274 | // destination array will do it. |
| 2275 | vixl::Label loop, done; |
| 2276 | const int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot); |
| 2277 | __ Bind(&loop); |
| 2278 | __ Cmp(src_curr_addr, src_stop_addr); |
| 2279 | __ B(&done, eq); |
| 2280 | { |
| 2281 | Register tmp = temps.AcquireW(); |
| 2282 | __ Ldr(tmp, MemOperand(src_curr_addr, element_size, vixl::PostIndex)); |
| 2283 | __ Str(tmp, MemOperand(dst_curr_addr, element_size, vixl::PostIndex)); |
| 2284 | } |
| 2285 | __ B(&loop); |
| 2286 | __ Bind(&done); |
| 2287 | } |
| 2288 | // We only need one card marking on the destination array. |
| 2289 | codegen_->MarkGCCard(dest.W(), Register(), /* value_can_be_null */ false); |
| 2290 | |
| 2291 | __ Bind(slow_path->GetExitLabel()); |
| 2292 | } |
| 2293 | |
Anton Kirilov | a3ffea2 | 2016-04-07 17:02:37 +0100 | [diff] [blame] | 2294 | static void GenIsInfinite(LocationSummary* locations, |
| 2295 | bool is64bit, |
| 2296 | vixl::MacroAssembler* masm) { |
| 2297 | Operand infinity; |
| 2298 | Register out; |
| 2299 | |
| 2300 | if (is64bit) { |
| 2301 | infinity = kPositiveInfinityDouble; |
| 2302 | out = XRegisterFrom(locations->Out()); |
| 2303 | } else { |
| 2304 | infinity = kPositiveInfinityFloat; |
| 2305 | out = WRegisterFrom(locations->Out()); |
| 2306 | } |
| 2307 | |
| 2308 | const Register zero = vixl::Assembler::AppropriateZeroRegFor(out); |
| 2309 | |
| 2310 | MoveFPToInt(locations, is64bit, masm); |
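| | // Bit trick: value ^ +inf leaves at most the sign bit set, and it does so iff |
| | // the input is +/-infinity (e.g. 32-bit: 0x7F800000 ^ 0x7F800000 == 0, and |
| | // 0xFF800000 ^ 0x7F800000 == 0x80000000). Shifting left by one discards the |
| | // sign bit, so the shifted result is zero iff the input was infinite. |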
| 2311 | __ Eor(out, out, infinity); |
| 2312 | // We don't care about the sign bit, so shift left. |
| 2313 | __ Cmp(zero, Operand(out, LSL, 1)); |
| 2314 | __ Cset(out, eq); |
| 2315 | } |
| 2316 | |
| 2317 | void IntrinsicLocationsBuilderARM64::VisitFloatIsInfinite(HInvoke* invoke) { |
| 2318 | CreateFPToIntLocations(arena_, invoke); |
| 2319 | } |
| 2320 | |
| 2321 | void IntrinsicCodeGeneratorARM64::VisitFloatIsInfinite(HInvoke* invoke) { |
| 2322 | GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); |
| 2323 | } |
| 2324 | |
| 2325 | void IntrinsicLocationsBuilderARM64::VisitDoubleIsInfinite(HInvoke* invoke) { |
| 2326 | CreateFPToIntLocations(arena_, invoke); |
| 2327 | } |
| 2328 | |
| 2329 | void IntrinsicCodeGeneratorARM64::VisitDoubleIsInfinite(HInvoke* invoke) { |
| 2330 | GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); |
| 2331 | } |
| 2332 | |
Aart Bik | 2f9fcc9 | 2016-03-01 15:16:54 -0800 | [diff] [blame] | 2333 | UNIMPLEMENTED_INTRINSIC(ARM64, ReferenceGetReferent) |
Aart Bik | 2f9fcc9 | 2016-03-01 15:16:54 -0800 | [diff] [blame] | 2334 | UNIMPLEMENTED_INTRINSIC(ARM64, IntegerHighestOneBit) |
| 2335 | UNIMPLEMENTED_INTRINSIC(ARM64, LongHighestOneBit) |
| 2336 | UNIMPLEMENTED_INTRINSIC(ARM64, IntegerLowestOneBit) |
| 2337 | UNIMPLEMENTED_INTRINSIC(ARM64, LongLowestOneBit) |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 2338 | |
Aart Bik | 0e54c01 | 2016-03-04 12:08:31 -0800 | [diff] [blame] | 2339 | // Java 1.8 Unsafe intrinsics. |
| 2340 | UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddInt) |
| 2341 | UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddLong) |
| 2342 | UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetInt) |
| 2343 | UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetLong) |
| 2344 | UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetObject) |
Aart Bik | 0e54c01 | 2016-03-04 12:08:31 -0800 | [diff] [blame] | 2345 | |
Aart Bik | 2f9fcc9 | 2016-03-01 15:16:54 -0800 | [diff] [blame] | 2346 | UNREACHABLE_INTRINSICS(ARM64) |
Roland Levillain | 4d02711 | 2015-07-01 15:41:14 +0100 | [diff] [blame] | 2347 | |
| 2348 | #undef __ |
| 2349 | |
Andreas Gampe | 878d58c | 2015-01-15 23:24:00 -0800 | [diff] [blame] | 2350 | } // namespace arm64 |
| 2351 | } // namespace art |