/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "a64/disasm-a64.h"
#include "a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

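// The memory peek/poke intrinsics below receive a raw 64-bit address, so the operand is built
// directly from the X register holding that address rather than from a heap reference.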
ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

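// Route assembler mnemonics through the code generator's VIXL macro-assembler.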
#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type)) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM64* codegen) {
  if (invoke->InputCount() == 0) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  InvokeDexCallingConventionVisitor calling_convention_visitor;

  // We're moving potentially two or more locations to locations that could overlap, so we need
  // a parallel move resolver.
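  // For example, if argument 0 currently lives where argument 1 must go and vice versa, naive
  // sequential moves would clobber one of the values; the resolver orders the moves (and breaks
  // cycles with a swap) so every source is read before it is overwritten.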
  HParallelMove parallel_move(arena);

  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    Location cc_loc = calling_convention_visitor.GetNextLocation(input->GetType());
    Location actual_loc = locations->InAt(i);

    parallel_move.AddMove(actual_loc, cc_loc, nullptr);
  }

  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    codegen->SaveLiveRegisters(invoke_->GetLocations());

    MoveArguments(invoke_, codegen->GetGraph()->GetArena(), codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    codegen->RestoreLiveRegisters(invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

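// Attempts to build an intrinsified LocationSummary for |invoke|. Returns true if the invoke
// was recognized as an intrinsic and will be handled by this backend's intrinsic code generator.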
bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
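      // Rev16 byte-swaps each halfword of the W register; the Sxth then sign-extends the low
      // halfword so the result is a properly sign-extended Java short.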
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // We only support FP registers here.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

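  // out = (in < 0) ? -in : in. As in Java, abs(MIN_VALUE) wraps back to MIN_VALUE under
  // two's-complement negation, so no special casing is needed.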
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
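  // Fmin/Fmax propagate NaN operands and order -0.0 before +0.0, which matches the semantics
  // required by Java's Math.min/Math.max.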
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

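  // Compare once, then select branch-free: out = is_min ? min(op1, op2) : max(op1, op2).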
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

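// Math.ceil/floor/rint map directly onto the ARM64 round-to-integral instructions: Frintp
// (toward +infinity), Frintm (toward -infinity) and Frintn (to nearest, ties to even).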
void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
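  // Compute floor(in + 0.5): Fcvtms converts to a signed integer rounding toward minus
  // infinity, which matches Java's documented Math.round() definition of floor(a + 0.5).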
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
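  // The current thread's java.lang.Thread peer is cached in the art::Thread structure; load it
  // relative to the fixed thread register (tr).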
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);

  MemOperand mem_op(base.X(), offset);
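  // Volatile gets need acquire semantics: either a load-acquire instruction or a plain load
  // followed by a memory barrier, depending on kUseAcquireRelease.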
  if (is_volatile) {
    if (kUseAcquireRelease) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);

  MemOperand mem_op(base.X(), offset);

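  // Ordered and volatile puts need release semantics: either a store-release instruction, or a
  // full barrier before the plain store (volatile puts additionally get a trailing barrier).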
  if (is_volatile || is_ordered) {
    if (kUseAcquireRelease) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        __ Dmb(InnerShareable, BarrierReads);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

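  // Writing an object reference into the heap needs a GC write barrier: mark the card covering
  // the destination object.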
  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  // TODO: Currently we use acquire-release load-stores in the CAS loop. One could reasonably write
  //       a version relying on simple exclusive load-stores and barriers instead.
  static_assert(kUseAcquireRelease, "Non-acquire-release inlined CAS not implemented, yet.");

  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());  // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    codegen->MarkGCCard(base, value);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr];
  // } while (tmp_value == expected && failure([tmp_ptr] <- value));
  // result = (tmp_value == expected);

  vixl::Label loop_head, exit_loop;
  __ Bind(&loop_head);

  __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
  __ Cmp(tmp_value, expected);
  __ B(&exit_loop, ne);

  __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
  __ Cbnz(tmp_32, &loop_head);

  __ Bind(&exit_loop);
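  // Reaching here by fall-through means the store-exclusive succeeded and the flags still hold
  // the Cmp result (eq); reaching here via the branch means the values differed (ne).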
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();
  // Starting offset within data array.
  const MemberOffset offset_offset = mirror::String::OffsetOffset();
  // Start of char data within array_.
  const MemberOffset data_offset = mirror::Array::DataOffset(sizeof(uint16_t));

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();  // Could reuse |temp| instead, at the cost of worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity the index parameter is requested in a register, so unlike Quick we do
  //       not optimize the code for constant indices (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  // Index computation.
  __ Ldr(temp, HeapOperand(obj, offset_offset));  // temp := str.offset.
  __ Ldr(array_temp, HeapOperand(obj, value_offset));  // array_temp := str.value.
  __ Add(temp, temp, idx);
  DCHECK_EQ(data_offset.Int32Value() % 2, 0);  // We'll compensate by shifting.
  __ Add(temp, temp, Operand(data_offset.Int32Value() / 2));
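  // The Ldrh below scales the halfword index by 2 (UXTW #1), which also multiplies the
  // data_offset/2 term added above back to the real byte offset.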

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), temp, UXTW, 1));  // out := array_temp[temp].

  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringIsEmpty)  // Might not want to do these two anyways, inlining should
UNIMPLEMENTED_INTRINSIC(StringLength)   // be good enough here.
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)

}  // namespace arm64
}  // namespace art