/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm_vixl.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm_vixl.h"
#include "common_arm.h"
#include "heap_poisoning.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/reference.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"

#include "aarch32/constants-aarch32.h"

namespace art {
namespace arm {

#define __ assembler->GetVIXLAssembler()->

using helpers::DRegisterFrom;
using helpers::HighRegisterFrom;
using helpers::HighSRegisterFrom;
using helpers::InputDRegisterAt;
using helpers::InputRegisterAt;
using helpers::InputSRegisterAt;
using helpers::Int32ConstantFrom;
using helpers::LocationFrom;
using helpers::LowRegisterFrom;
using helpers::LowSRegisterFrom;
using helpers::OutputDRegister;
using helpers::OutputRegister;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;

using namespace vixl::aarch32;  // NOLINT(build/namespaces)

using vixl::ExactAssemblyScope;
using vixl::CodeBufferCheckScope;

ArmVIXLAssembler* IntrinsicCodeGeneratorARMVIXL::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARMVIXL::GetAllocator() {
  return codegen_->GetGraph()->GetAllocator();
}

// Default slow-path for fallback (calling the managed code to handle the intrinsic) in an
// intrinsified call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
//
// Note: If an invoke wasn't sharpened, we will put down an invoke-virtual here. That's potentially
//       sub-optimal (compared to a direct pointer call), but this is a slow-path.
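//
// A rough illustration (not from the original sources) of the code this slow
// path emits; the exact argument moves depend on the invoke's location
// summary and the managed calling convention:
//
//   SlowPathEntry:
//     <save live registers>
//     <move arguments into the regular calling-convention locations>
//     <call the original method (static/direct or virtual)>
//     <move the return value into the expected output location>
//     <restore live registers>
//     b SlowPathExit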

class IntrinsicSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit IntrinsicSlowPathARMVIXL(HInvoke* invoke)
      : SlowPathCodeARMVIXL(invoke), invoke_(invoke) {}

  Location MoveArguments(CodeGenerator* codegen) {
    InvokeDexCallingConventionVisitorARMVIXL calling_convention_visitor;
    IntrinsicVisitor::MoveArguments(invoke_, codegen, &calling_convention_visitor);
    return calling_convention_visitor.GetMethodLocation();
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    ArmVIXLAssembler* assembler = down_cast<ArmVIXLAssembler*>(codegen->GetAssembler());
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    Location method_loc = MoveArguments(codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), method_loc, this);
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), method_loc, this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      codegen->MoveFromReturnRegister(out, invoke_->GetType());
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPath"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARMVIXL);
};

// Compute base address for the System.arraycopy intrinsic in `base`.
static void GenSystemArrayCopyBaseAddress(ArmVIXLAssembler* assembler,
                                          DataType::Type type,
                                          const vixl32::Register& array,
                                          const Location& pos,
                                          const vixl32::Register& base) {
  // This routine is only used by the SystemArrayCopy intrinsic at the
  // moment. We could allow other types (e.g. DataType::Type::kUint16) as
  // `type` to implement the SystemArrayCopyChar intrinsic.
  DCHECK_EQ(type, DataType::Type::kReference);
  const int32_t element_size = DataType::Size(type);
  const uint32_t element_size_shift = DataType::SizeShift(type);
  const uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();

  if (pos.IsConstant()) {
    int32_t constant = Int32ConstantFrom(pos);
    __ Add(base, array, element_size * constant + data_offset);
  } else {
    __ Add(base, array, Operand(RegisterFrom(pos), vixl32::LSL, element_size_shift));
    __ Add(base, base, data_offset);
  }
}
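
// Illustrative note (not an upstream comment): for a reference array on this
// 32-bit target element_size is 4, so both paths above compute
//   base = array + data_offset + pos * 4,
// i.e. the address of the element at index `pos`.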

// Compute end address for the System.arraycopy intrinsic in `end`.
static void GenSystemArrayCopyEndAddress(ArmVIXLAssembler* assembler,
                                         DataType::Type type,
                                         const Location& copy_length,
                                         const vixl32::Register& base,
                                         const vixl32::Register& end) {
  // This routine is only used by the SystemArrayCopy intrinsic at the
  // moment. We could allow other types (e.g. DataType::Type::kUint16) as
  // `type` to implement the SystemArrayCopyChar intrinsic.
  DCHECK_EQ(type, DataType::Type::kReference);
  const int32_t element_size = DataType::Size(type);
  const uint32_t element_size_shift = DataType::SizeShift(type);

  if (copy_length.IsConstant()) {
    int32_t constant = Int32ConstantFrom(copy_length);
    __ Add(end, base, element_size * constant);
  } else {
    __ Add(end, base, Operand(RegisterFrom(copy_length), vixl32::LSL, element_size_shift));
  }
}
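
// Illustrative note (not an upstream comment): the computed address is
//   end = base + copy_length * element_size,
// i.e. one past the last element to be copied.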

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathARMVIXL : public SlowPathCodeARMVIXL {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathARMVIXL(HInstruction* instruction)
      : SlowPathCodeARMVIXL(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
    ArmVIXLAssembler* assembler = arm_codegen->GetAssembler();
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    DataType::Type type = DataType::Type::kReference;
    const int32_t element_size = DataType::Size(type);

    vixl32::Register dest = InputRegisterAt(instruction_, 2);
    Location dest_pos = locations->InAt(3);
    vixl32::Register src_curr_addr = RegisterFrom(locations->GetTemp(0));
    vixl32::Register dst_curr_addr = RegisterFrom(locations->GetTemp(1));
    vixl32::Register src_stop_addr = RegisterFrom(locations->GetTemp(2));
    vixl32::Register tmp = RegisterFrom(locations->GetTemp(3));

    __ Bind(GetEntryLabel());
    // Compute the base destination address in `dst_curr_addr`.
    GenSystemArrayCopyBaseAddress(assembler, type, dest, dest_pos, dst_curr_addr);

    vixl32::Label loop;
    __ Bind(&loop);
    __ Ldr(tmp, MemOperand(src_curr_addr, element_size, PostIndex));
    assembler->MaybeUnpoisonHeapReference(tmp);
    // TODO: Inline the mark bit check before calling the runtime?
    // tmp = ReadBarrier::Mark(tmp);
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathARM::EmitNativeCode for more
    // explanations.)
    DCHECK(!tmp.IsSP());
    DCHECK(!tmp.IsLR());
    DCHECK(!tmp.IsPC());
    // IP is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary (and not preserved). It thus cannot hold any
    // live value in this slow path.
    DCHECK(!src_curr_addr.Is(ip));
    DCHECK(!dst_curr_addr.Is(ip));
    DCHECK(!src_stop_addr.Is(ip));
    DCHECK(!tmp.Is(ip));
    DCHECK(tmp.IsRegister()) << tmp;
    // TODO: Load the entrypoint once before the loop, instead of
    // loading it at every iteration.
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kArmPointerSize>(tmp.GetCode());
    // This runtime call does not require a stack map.
    arm_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    assembler->MaybePoisonHeapReference(tmp);
    __ Str(tmp, MemOperand(dst_curr_addr, element_size, PostIndex));
    __ Cmp(src_curr_addr, src_stop_addr);
    __ B(ne, &loop, /* far_target */ false);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierSystemArrayCopySlowPathARMVIXL";
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathARMVIXL);
};

IntrinsicLocationsBuilderARMVIXL::IntrinsicLocationsBuilderARMVIXL(CodeGeneratorARMVIXL* codegen)
    : allocator_(codegen->GetGraph()->GetAllocator()),
      codegen_(codegen),
      assembler_(codegen->GetAssembler()),
      features_(codegen->GetInstructionSetFeatures()) {}

bool IntrinsicLocationsBuilderARMVIXL::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ Vmov(LowRegisterFrom(output), HighRegisterFrom(output), DRegisterFrom(input));
  } else {
    __ Vmov(RegisterFrom(output), SRegisterFrom(input));
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ Vmov(DRegisterFrom(output), LowRegisterFrom(input), HighRegisterFrom(input));
  } else {
    __ Vmov(SRegisterFrom(output), RegisterFrom(input));
  }
}
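
// Illustrative note (not an upstream comment): these are raw bit moves, not
// numeric conversions. VMOV between core and FP registers preserves the bit
// pattern, matching the Java semantics, e.g.
//   Double.doubleToRawLongBits(1.0) == 0x3ff0000000000000L and
//   Float.floatToRawIntBits(1.0f) == 0x3f800000.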

void IntrinsicLocationsBuilderARMVIXL::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARMVIXL::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARMVIXL::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARMVIXL::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateLongToLongLocationsWithOverlap(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(HInvoke* invoke,
                                    DataType::Type type,
                                    CodeGeneratorARMVIXL* codegen) {
  ArmVIXLAssembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  Location in = locations->InAt(0);
  vixl32::Register out = RegisterFrom(locations->Out());

  DCHECK((type == DataType::Type::kInt32) || (type == DataType::Type::kInt64));

  if (type == DataType::Type::kInt64) {
    vixl32::Register in_reg_lo = LowRegisterFrom(in);
    vixl32::Register in_reg_hi = HighRegisterFrom(in);
    vixl32::Label end;
    vixl32::Label* final_label = codegen->GetFinalLabel(invoke, &end);
    __ Clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, final_label, /* far_target */ false);
    __ Clz(out, in_reg_lo);
    __ Add(out, out, 32);
    if (end.IsReferenced()) {
      __ Bind(&end);
    }
  } else {
    __ Clz(out, RegisterFrom(in));
  }
}
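
// Illustrative note (not an upstream comment): for the 64-bit case, CLZ of
// the high word is the answer unless the high word is zero, in which case
// the result is 32 + CLZ(low word), e.g.
//   Long.numberOfLeadingZeros(0x0000000080000000L) == 32.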

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke, DataType::Type::kInt32, codegen_);
}

void IntrinsicLocationsBuilderARMVIXL::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLongToLongLocationsWithOverlap(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke, DataType::Type::kInt64, codegen_);
}

static void GenNumberOfTrailingZeros(HInvoke* invoke,
                                     DataType::Type type,
                                     CodeGeneratorARMVIXL* codegen) {
  DCHECK((type == DataType::Type::kInt32) || (type == DataType::Type::kInt64));

  ArmVIXLAssembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  vixl32::Register out = RegisterFrom(locations->Out());

  if (type == DataType::Type::kInt64) {
    vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
    vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
    vixl32::Label end;
    vixl32::Label* final_label = codegen->GetFinalLabel(invoke, &end);
    __ Rbit(out, in_reg_lo);
    __ Clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, final_label, /* far_target */ false);
    __ Rbit(out, in_reg_hi);
    __ Clz(out, out);
    __ Add(out, out, 32);
    if (end.IsReferenced()) {
      __ Bind(&end);
    }
  } else {
    vixl32::Register in = RegisterFrom(locations->InAt(0));
    __ Rbit(out, in);
    __ Clz(out, out);
  }
}
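
// Illustrative note (not an upstream comment): ARM has no count-trailing-
// zeros instruction, so the count is computed as CLZ(RBIT(x)), e.g.
//   Integer.numberOfTrailingZeros(8) == 3.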

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke, DataType::Type::kInt32, codegen_);
}

void IntrinsicLocationsBuilderARMVIXL::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateLongToLongLocationsWithOverlap(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke, DataType::Type::kInt64, codegen_);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathSqrt(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Vsqrt(OutputDRegister(invoke), InputDRegisterAt(invoke, 0));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathRint(HInvoke* invoke) {
  if (features_.HasARMv8AInstructions()) {
    CreateFPToFPLocations(allocator_, invoke);
  }
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathRint(HInvoke* invoke) {
  DCHECK(codegen_->GetInstructionSetFeatures().HasARMv8AInstructions());
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Vrintn(F64, F64, OutputDRegister(invoke), InputDRegisterAt(invoke, 0));
}
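
// Illustrative note (not an upstream comment): VRINTN rounds to nearest with
// ties to even, which matches Math.rint, e.g.
//   Math.rint(2.5) == 2.0 and Math.rint(3.5) == 4.0.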

void IntrinsicLocationsBuilderARMVIXL::VisitMathRoundFloat(HInvoke* invoke) {
  if (features_.HasARMv8AInstructions()) {
    LocationSummary* locations =
        new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathRoundFloat(HInvoke* invoke) {
  DCHECK(codegen_->GetInstructionSetFeatures().HasARMv8AInstructions());

  ArmVIXLAssembler* assembler = GetAssembler();
  vixl32::SRegister in_reg = InputSRegisterAt(invoke, 0);
  vixl32::Register out_reg = OutputRegister(invoke);
  vixl32::SRegister temp1 = LowSRegisterFrom(invoke->GetLocations()->GetTemp(0));
  vixl32::SRegister temp2 = HighSRegisterFrom(invoke->GetLocations()->GetTemp(0));
  vixl32::Label done;
  vixl32::Label* final_label = codegen_->GetFinalLabel(invoke, &done);

  // Round to nearest integer, ties away from zero.
  __ Vcvta(S32, F32, temp1, in_reg);
  __ Vmov(out_reg, temp1);

  // For positive, zero or NaN inputs, rounding is done.
  __ Cmp(out_reg, 0);
  __ B(ge, final_label, /* far_target */ false);

  // Handle input < 0 cases.
  // If input is negative but not a tie, previous result (round to nearest) is valid.
  // If input is a negative tie, change rounding direction to positive infinity, out_reg += 1.
  __ Vrinta(F32, F32, temp1, in_reg);
  __ Vmov(temp2, 0.5);
  __ Vsub(F32, temp1, in_reg, temp1);
  __ Vcmp(F32, temp1, temp2);
  __ Vmrs(RegisterOrAPSR_nzcv(kPcCode), FPSCR);
  {
    // Use ExactAssemblyScope here because we are using IT.
    ExactAssemblyScope it_scope(assembler->GetVIXLAssembler(),
                                2 * kMaxInstructionSizeInBytes,
                                CodeBufferCheckScope::kMaximumSize);
    __ it(eq);
    __ add(eq, out_reg, out_reg, 1);
  }

  if (done.IsReferenced()) {
    __ Bind(&done);
  }
}
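
// Illustrative trace (not an upstream comment) for input -2.5f: VCVTA (ties
// away from zero) yields -3, while Math.round(-2.5f) == -2 (half up). VRINTA
// gives -3.0f, so in - rounded == -2.5f - (-3.0f) == 0.5f; the comparison
// against 0.5f succeeds and the conditional ADD above fixes the result up
// to -2.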

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ Ldrsb(OutputRegister(invoke), MemOperand(LowRegisterFrom(invoke->GetLocations()->InAt(0))));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ Ldr(OutputRegister(invoke), MemOperand(LowRegisterFrom(invoke->GetLocations()->InAt(0))));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  vixl32::Register addr = LowRegisterFrom(invoke->GetLocations()->InAt(0));
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  vixl32::Register lo = LowRegisterFrom(invoke->GetLocations()->Out());
  vixl32::Register hi = HighRegisterFrom(invoke->GetLocations()->Out());
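  // Note (added for clarity, not an upstream comment): when `addr` aliases
  // `lo`, the high word is loaded first so the address is still intact for
  // the second load.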
  if (addr.Is(lo)) {
    __ Ldr(hi, MemOperand(addr, 4));
    __ Ldr(lo, MemOperand(addr));
  } else {
    __ Ldr(lo, MemOperand(addr));
    __ Ldr(hi, MemOperand(addr, 4));
  }
}

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ Ldrsh(OutputRegister(invoke), MemOperand(LowRegisterFrom(invoke->GetLocations()->InAt(0))));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Strb(InputRegisterAt(invoke, 1), MemOperand(LowRegisterFrom(invoke->GetLocations()->InAt(0))));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Str(InputRegisterAt(invoke, 1), MemOperand(LowRegisterFrom(invoke->GetLocations()->InAt(0))));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  vixl32::Register addr = LowRegisterFrom(invoke->GetLocations()->InAt(0));
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ Str(LowRegisterFrom(invoke->GetLocations()->InAt(1)), MemOperand(addr));
  __ Str(HighRegisterFrom(invoke->GetLocations()->InAt(1)), MemOperand(addr, 4));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Strh(InputRegisterAt(invoke, 1), MemOperand(LowRegisterFrom(invoke->GetLocations()->InAt(0))));
}

void IntrinsicLocationsBuilderARMVIXL::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARMVIXL::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Ldr(OutputRegister(invoke),
         MemOperand(tr, Thread::PeerOffset<kArmPointerSize>().Int32Value()));
}
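
// Illustrative note (not an upstream comment): `tr` is the core register ART
// reserves for the current Thread*, and Thread::PeerOffset is the offset of
// the java.lang.Thread peer object within it, so the intrinsic is a single
// load.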

static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         CodeGeneratorARMVIXL* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  ArmVIXLAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  vixl32::Register base = InputRegisterAt(invoke, 1);     // Object pointer.
  Location offset_loc = locations->InAt(2);
  vixl32::Register offset = LowRegisterFrom(offset_loc);  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  switch (type) {
    case DataType::Type::kInt32: {
      vixl32::Register trg = RegisterFrom(trg_loc);
      __ Ldr(trg, MemOperand(base, offset));
      if (is_volatile) {
        __ Dmb(vixl32::ISH);
      }
      break;
    }

    case DataType::Type::kReference: {
      vixl32::Register trg = RegisterFrom(trg_loc);
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          // Piggy-back on the field load path using introspection for the Baker read barrier.
          __ Add(RegisterFrom(temp), base, Operand(offset));
          MemOperand src(RegisterFrom(temp), 0);
          codegen->GenerateFieldLoadWithBakerReadBarrier(
              invoke, trg_loc, base, src, /* needs_null_check */ false);
          if (is_volatile) {
            __ Dmb(vixl32::ISH);
          }
        } else {
          __ Ldr(trg, MemOperand(base, offset));
          if (is_volatile) {
            __ Dmb(vixl32::ISH);
          }
          codegen->GenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
        }
      } else {
        __ Ldr(trg, MemOperand(base, offset));
        if (is_volatile) {
          __ Dmb(vixl32::ISH);
        }
        assembler->MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    case DataType::Type::kInt64: {
      vixl32::Register trg_lo = LowRegisterFrom(trg_loc);
      vixl32::Register trg_hi = HighRegisterFrom(trg_loc);
      if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
        UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
        const vixl32::Register temp_reg = temps.Acquire();
        __ Add(temp_reg, base, offset);
        __ Ldrexd(trg_lo, trg_hi, MemOperand(temp_reg));
      } else {
        __ Ldrd(trg_lo, trg_hi, MemOperand(base, offset));
      }
      if (is_volatile) {
        __ Dmb(vixl32::ISH);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                          HInvoke* invoke,
                                          DataType::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorARMVIXL::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}

void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* allocator,
                                     const ArmInstructionSetFeatures& features,
                                     DataType::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == DataType::Type::kInt64) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == DataType::Type::kReference) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kInt32, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kInt32, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kInt32, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kReference, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kReference, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kReference, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kInt64, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kInt64, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      allocator_, features_, DataType::Type::kInt64, /* is_volatile */ true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARMVIXL* codegen) {
  ArmVIXLAssembler* assembler = codegen->GetAssembler();

  vixl32::Register base = RegisterFrom(locations->InAt(1));       // Object pointer.
  vixl32::Register offset = LowRegisterFrom(locations->InAt(2));  // Long offset, lo part only.
  vixl32::Register value;

  if (is_volatile || is_ordered) {
    __ Dmb(vixl32::ISH);
  }

  if (type == DataType::Type::kInt64) {
    vixl32::Register value_lo = LowRegisterFrom(locations->InAt(3));
    vixl32::Register value_hi = HighRegisterFrom(locations->InAt(3));
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      vixl32::Register temp_lo = RegisterFrom(locations->GetTemp(0));
      vixl32::Register temp_hi = RegisterFrom(locations->GetTemp(1));
      UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
      const vixl32::Register temp_reg = temps.Acquire();

      __ Add(temp_reg, base, offset);
      vixl32::Label loop_head;
      __ Bind(&loop_head);
      __ Ldrexd(temp_lo, temp_hi, MemOperand(temp_reg));
      __ Strexd(temp_lo, value_lo, value_hi, MemOperand(temp_reg));
      __ Cmp(temp_lo, 0);
      __ B(ne, &loop_head, /* far_target */ false);
    } else {
      __ Strd(value_lo, value_hi, MemOperand(base, offset));
    }
  } else {
    value = RegisterFrom(locations->InAt(3));
    vixl32::Register source = value;
    if (kPoisonHeapReferences && type == DataType::Type::kReference) {
      vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
      __ Mov(temp, value);
      assembler->PoisonHeapReference(temp);
      source = temp;
    }
    __ Str(source, MemOperand(base, offset));
  }

  if (is_volatile) {
    __ Dmb(vixl32::ISH);
  }

  if (type == DataType::Type::kReference) {
    vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
    vixl32::Register card = RegisterFrom(locations->GetTemp(1));
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}
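
// Illustrative note (not an upstream comment): on cores without single-copy
// atomic LDRD/STRD, the volatile 64-bit store above becomes an exclusive
// access loop; STREXD writes 0 to its status register on success:
//   add    temp_reg, base, offset
// retry:
//   ldrexd temp_lo, temp_hi, [temp_reg]   ; claim exclusive access
//   strexd temp_lo, value_lo, value_hi, [temp_reg]
//   cmp    temp_lo, #0
//   bne    retry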

void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator,
                                                HInvoke* invoke,
                                                DataType::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output. Likewise when
  // emitting a (Baker) read barrier, which may call.
  Location::OutputOverlap overlaps =
      ((kPoisonHeapReferences && type == DataType::Type::kReference) || can_call)
          ? Location::kOutputOverlap
          : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);

  // Temporary registers used in CAS. In the object case
  // (UnsafeCASObject intrinsic), these are also used for
  // card-marking, and possibly for (Baker) read barrier.
  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
}

static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorARMVIXL* codegen) {
  DCHECK_NE(type, DataType::Type::kInt64);

  ArmVIXLAssembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Location out_loc = locations->Out();
  vixl32::Register out = OutputRegister(invoke);               // Boolean result.

  vixl32::Register base = InputRegisterAt(invoke, 1);          // Object pointer.
  Location offset_loc = locations->InAt(2);
  vixl32::Register offset = LowRegisterFrom(offset_loc);       // Offset (discard high 4B).
  vixl32::Register expected = InputRegisterAt(invoke, 3);      // Expected.
  vixl32::Register value = InputRegisterAt(invoke, 4);         // Value.

  Location tmp_ptr_loc = locations->GetTemp(0);
  vixl32::Register tmp_ptr = RegisterFrom(tmp_ptr_loc);        // Pointer to actual memory.
  vixl32::Register tmp = RegisterFrom(locations->GetTemp(1));  // Value in memory.

  if (type == DataType::Type::kReference) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp, base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->UpdateReferenceFieldWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* field_offset */ offset_loc,
          tmp_ptr_loc,
          /* needs_null_check */ false,
          tmp);
    }
  }

  // Prevent reordering with prior memory operations.
  // Emit a DMB ISH instruction instead of a DMB ISHST one, as the
  // latter allows a preceding load to be delayed past the STXR
  // instruction below.
  __ Dmb(vixl32::ISH);

  __ Add(tmp_ptr, base, offset);

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp == 0;

  vixl32::Label loop_head;
  __ Bind(&loop_head);

  __ Ldrex(tmp, MemOperand(tmp_ptr));

  __ Subs(tmp, tmp, expected);

  {
    ExactAssemblyScope aas(assembler->GetVIXLAssembler(),
                           3 * kMaxInstructionSizeInBytes,
                           CodeBufferCheckScope::kMaximumSize);

    __ itt(eq);
    __ strex(eq, tmp, value, MemOperand(tmp_ptr));
    __ cmp(eq, tmp, 1);
  }

  __ B(eq, &loop_head, /* far_target */ false);

  __ Dmb(vixl32::ISH);

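  // Note (added for clarity, not an upstream comment): materialize the
  // boolean result as out = (tmp == 0). RSBS computes out = 1 - tmp; the
  // conditional MOV below clears `out` when the subtraction borrows (i.e.
  // when tmp > 1), so `out` is 1 exactly when the CAS succeeded.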
  __ Rsbs(out, tmp, 1);

  {
    ExactAssemblyScope aas(assembler->GetVIXLAssembler(),
                           2 * kMaxInstructionSizeInBytes,
                           CodeBufferCheckScope::kMaximumSize);

    __ it(cc);
    __ mov(cc, out, 0);
  }

  if (kPoisonHeapReferences && type == DataType::Type::kReference) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not unpoison `value`, as it is the same register as
      // `expected`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value);
    }
  }
}
1081
1082void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeCASInt(HInvoke* invoke) {
Vladimir Markof28be432018-08-14 12:20:51 +00001083 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke, DataType::Type::kInt32);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001084}
1085void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeCASObject(HInvoke* invoke) {
1086 // The only read barrier implementation supporting the
1087 // UnsafeCASObject intrinsic is the Baker-style read barrier.
1088 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1089 return;
1090 }
1091
Vladimir Markof28be432018-08-14 12:20:51 +00001092 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke, DataType::Type::kReference);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001093}
1094void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeCASInt(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001095 GenCas(invoke, DataType::Type::kInt32, codegen_);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001096}
1097void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeCASObject(HInvoke* invoke) {
1098 // The only read barrier implementation supporting the
1099 // UnsafeCASObject intrinsic is the Baker-style read barrier.
1100 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1101
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001102 GenCas(invoke, DataType::Type::kReference, codegen_);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001103}
1104
1105void IntrinsicLocationsBuilderARMVIXL::VisitStringCompareTo(HInvoke* invoke) {
1106 // The inputs plus one temp.
Vladimir Markoca6fff82017-10-03 14:49:14 +01001107 LocationSummary* locations =
1108 new (allocator_) LocationSummary(invoke,
1109 invoke->InputAt(1)->CanBeNull()
1110 ? LocationSummary::kCallOnSlowPath
1111 : LocationSummary::kNoCall,
1112 kIntrinsified);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001113 locations->SetInAt(0, Location::RequiresRegister());
1114 locations->SetInAt(1, Location::RequiresRegister());
1115 locations->AddTemp(Location::RequiresRegister());
1116 locations->AddTemp(Location::RequiresRegister());
1117 locations->AddTemp(Location::RequiresRegister());
1118 // Need a temporary register for the String compression feature.
1119 if (mirror::kUseStringCompression) {
1120 locations->AddTemp(Location::RequiresRegister());
Anton Kirilov5ec62182016-10-13 20:16:02 +01001121 }
1122 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1123}
1124
Artem Serov672b9c12017-12-05 18:04:07 +00001125// Forward declaration.
1126//
1127// The ART build system imposes a size limit (deviceFrameSizeLimit) on the stack frames generated
1128// by the compiler for every C++ function, and if this function were inlined into
1129// IntrinsicCodeGeneratorARMVIXL::VisitStringCompareTo, the limit would be exceeded, resulting in a
1130// build failure. That is why the NO_INLINE attribute is used.
1131static void NO_INLINE GenerateStringCompareToLoop(ArmVIXLAssembler* assembler,
1132 HInvoke* invoke,
1133 vixl32::Label* end,
1134 vixl32::Label* different_compression);
1135
Anton Kirilov5ec62182016-10-13 20:16:02 +01001136void IntrinsicCodeGeneratorARMVIXL::VisitStringCompareTo(HInvoke* invoke) {
1137 ArmVIXLAssembler* assembler = GetAssembler();
1138 LocationSummary* locations = invoke->GetLocations();
1139
Artem Serov672b9c12017-12-05 18:04:07 +00001140 const vixl32::Register str = InputRegisterAt(invoke, 0);
1141 const vixl32::Register arg = InputRegisterAt(invoke, 1);
1142 const vixl32::Register out = OutputRegister(invoke);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001143
Artem Serov672b9c12017-12-05 18:04:07 +00001144 const vixl32::Register temp0 = RegisterFrom(locations->GetTemp(0));
1145 const vixl32::Register temp1 = RegisterFrom(locations->GetTemp(1));
1146 const vixl32::Register temp2 = RegisterFrom(locations->GetTemp(2));
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001147 vixl32::Register temp3;
Anton Kirilov5ec62182016-10-13 20:16:02 +01001148 if (mirror::kUseStringCompression) {
1149 temp3 = RegisterFrom(locations->GetTemp(3));
Anton Kirilov5ec62182016-10-13 20:16:02 +01001150 }
1151
Anton Kirilov5ec62182016-10-13 20:16:02 +01001152 vixl32::Label end;
1153 vixl32::Label different_compression;
1154
1155 // Get the offset of the count field within a string object.
1156 const int32_t count_offset = mirror::String::CountOffset().Int32Value();
Anton Kirilov5ec62182016-10-13 20:16:02 +01001157
1158 // Note that the null check must have been done earlier.
1159 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1160
1161 // Take slow path and throw if input can be and is null.
1162 SlowPathCodeARMVIXL* slow_path = nullptr;
1163 const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
1164 if (can_slow_path) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001165 slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001166 codegen_->AddSlowPath(slow_path);
xueliang.zhongf51bc622016-11-04 09:23:32 +00001167 __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01001168 }
1169
1170 // Reference equality check, return 0 if same reference.
1171 __ Subs(out, str, arg);
1172 __ B(eq, &end);
1173
Anton Kirilov5ec62182016-10-13 20:16:02 +01001174 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001175 // Load `count` fields of this and argument strings.
Anton Kirilov5ec62182016-10-13 20:16:02 +01001176 __ Ldr(temp3, MemOperand(str, count_offset));
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001177 __ Ldr(temp2, MemOperand(arg, count_offset));
1178 // Extract lengths from the `count` fields.
1179 __ Lsr(temp0, temp3, 1u);
1180 __ Lsr(temp1, temp2, 1u);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001181 } else {
1182 // Load lengths of this and argument strings.
1183 __ Ldr(temp0, MemOperand(str, count_offset));
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001184 __ Ldr(temp1, MemOperand(arg, count_offset));
Anton Kirilov5ec62182016-10-13 20:16:02 +01001185 }
1186 // out = length diff.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001187 __ Subs(out, temp0, temp1);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001188 // temp0 = min(len(str), len(arg)).
1189
1190 {
Artem Serov0fb37192016-12-06 18:13:40 +00001191 ExactAssemblyScope aas(assembler->GetVIXLAssembler(),
1192 2 * kMaxInstructionSizeInBytes,
1193 CodeBufferCheckScope::kMaximumSize);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001194
1195 __ it(gt);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001196 __ mov(gt, temp0, temp1);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001197 }
1198
Anton Kirilov5ec62182016-10-13 20:16:02 +01001199 // Shorter string is empty?
xueliang.zhongf51bc622016-11-04 09:23:32 +00001200 // Note that mirror::kUseStringCompression==true introduces lots of instructions,
1201 // which places the &end label far away from this branch and makes it not 'CBZ-encodable'.
1202 __ CompareAndBranchIfZero(temp0, &end, mirror::kUseStringCompression);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001203
1204 if (mirror::kUseStringCompression) {
1205 // Check that both strings use the same compression style; only then can this comparison loop be used.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001206 __ Eors(temp2, temp2, temp3);
1207 __ Lsrs(temp2, temp2, 1u);
1208 __ B(cs, &different_compression);
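    // (The EOR leaves bit 0 set iff the compression flags differ; LSRS then shifts
    // that bit into the carry flag, so the CS branch is taken exactly when the two
    // strings use different compression styles.)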
Anton Kirilov5ec62182016-10-13 20:16:02 +01001209 // For string compression, calculate the number of bytes to compare (not chars).
1210 // This could in theory exceed INT32_MAX, so treat temp0 as unsigned.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001211 __ Lsls(temp3, temp3, 31u); // Extract only the compression flag.
Anton Kirilov5ec62182016-10-13 20:16:02 +01001212
Artem Serov0fb37192016-12-06 18:13:40 +00001213 ExactAssemblyScope aas(assembler->GetVIXLAssembler(),
1214 2 * kMaxInstructionSizeInBytes,
1215 CodeBufferCheckScope::kMaximumSize);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001216
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001217 __ it(ne);
1218 __ add(ne, temp0, temp0, temp0);
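    // (The LSLS above set the flags: NE means the compression bit was 1, i.e. both
    // strings are uncompressed, so double the char count to get the byte count.)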
Anton Kirilov5ec62182016-10-13 20:16:02 +01001219 }
1220
Artem Serov672b9c12017-12-05 18:04:07 +00001221
1222 GenerateStringCompareToLoop(assembler, invoke, &end, &different_compression);
1223
1224 __ Bind(&end);
1225
1226 if (can_slow_path) {
1227 __ Bind(slow_path->GetExitLabel());
1228 }
1229}
1230
1231static void GenerateStringCompareToLoop(ArmVIXLAssembler* assembler,
1232 HInvoke* invoke,
1233 vixl32::Label* end,
1234 vixl32::Label* different_compression) {
1235 LocationSummary* locations = invoke->GetLocations();
1236
1237 const vixl32::Register str = InputRegisterAt(invoke, 0);
1238 const vixl32::Register arg = InputRegisterAt(invoke, 1);
1239 const vixl32::Register out = OutputRegister(invoke);
1240
1241 const vixl32::Register temp0 = RegisterFrom(locations->GetTemp(0));
1242 const vixl32::Register temp1 = RegisterFrom(locations->GetTemp(1));
1243 const vixl32::Register temp2 = RegisterFrom(locations->GetTemp(2));
1244 vixl32::Register temp3;
1245 if (mirror::kUseStringCompression) {
1246 temp3 = RegisterFrom(locations->GetTemp(3));
1247 }
1248
1249 vixl32::Label loop;
1250 vixl32::Label find_char_diff;
1251
1252 const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001253 // Store offset of string value in preparation for comparison loop.
1254 __ Mov(temp1, value_offset);
1255
Anton Kirilov5ec62182016-10-13 20:16:02 +01001256 // Assertions that must hold in order to compare multiple characters at a time.
1257 CHECK_ALIGNED(value_offset, 8);
1258 static_assert(IsAligned<8>(kObjectAlignment),
1259 "String data must be 8-byte aligned for unrolled CompareTo loop.");
1260
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001261 const unsigned char_size = DataType::Size(DataType::Type::kUint16);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001262 DCHECK_EQ(char_size, 2u);
1263
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001264 UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
1265
Anton Kirilov5ec62182016-10-13 20:16:02 +01001266 vixl32::Label find_char_diff_2nd_cmp;
1267 // Unrolled loop comparing 4x16-bit chars per iteration (ok because of string data alignment).
1268 __ Bind(&loop);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001269 vixl32::Register temp_reg = temps.Acquire();
Anton Kirilov5ec62182016-10-13 20:16:02 +01001270 __ Ldr(temp_reg, MemOperand(str, temp1));
1271 __ Ldr(temp2, MemOperand(arg, temp1));
1272 __ Cmp(temp_reg, temp2);
Artem Serov517d9f62016-12-12 15:51:15 +00001273 __ B(ne, &find_char_diff, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001274 __ Add(temp1, temp1, char_size * 2);
1275
1276 __ Ldr(temp_reg, MemOperand(str, temp1));
1277 __ Ldr(temp2, MemOperand(arg, temp1));
1278 __ Cmp(temp_reg, temp2);
Artem Serov517d9f62016-12-12 15:51:15 +00001279 __ B(ne, &find_char_diff_2nd_cmp, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001280 __ Add(temp1, temp1, char_size * 2);
1281 // With string compression, we have compared 8 bytes, otherwise 4 chars.
1282 __ Subs(temp0, temp0, (mirror::kUseStringCompression ? 8 : 4));
Artem Serov517d9f62016-12-12 15:51:15 +00001283 __ B(hi, &loop, /* far_target */ false);
Artem Serov672b9c12017-12-05 18:04:07 +00001284 __ B(end);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001285
1286 __ Bind(&find_char_diff_2nd_cmp);
1287 if (mirror::kUseStringCompression) {
1288 __ Subs(temp0, temp0, 4); // 4 bytes previously compared.
Artem Serov672b9c12017-12-05 18:04:07 +00001289 __ B(ls, end, /* far_target */ false); // Was the second comparison fully beyond the end?
Anton Kirilov5ec62182016-10-13 20:16:02 +01001290 } else {
1291 // Without string compression, we can start treating temp0 as signed
1292 // and rely on the signed comparison below.
1293 __ Sub(temp0, temp0, 2);
1294 }
1295
1296 // Find the single character difference.
1297 __ Bind(&find_char_diff);
1298 // Get the bit position of the first character that differs.
1299 __ Eor(temp1, temp2, temp_reg);
1300 __ Rbit(temp1, temp1);
1301 __ Clz(temp1, temp1);
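  // (RBIT reverses the bit order and CLZ then counts leading zeros, so temp1 ends
  // up holding the index of the lowest set bit of the XOR, i.e. the first bit that
  // differs between the two words.)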
1302
1303 // temp0 = number of characters remaining to compare.
1304 // (Without string compression, it could be < 1 if a difference is found by the second CMP
1305 // in the comparison loop, and after the end of the shorter string data).
1306
1307 // Without string compression (temp1 >> 4) = character where difference occurs between the last
1308 // two words compared, in the interval [0,1].
1309 // (0 for low half-word different, 1 for high half-word different).
1310 // With string compression, (temp1 << 3) = byte where the difference occurs,
1311 // in the interval [0,3].
1312
1313 // If temp0 <= (temp1 >> (kUseStringCompression ? 3 : 4)), the difference occurs outside
1314 // the remaining string data, so just return length diff (out).
1315 // The comparison is unsigned for string compression, otherwise signed.
1316 __ Cmp(temp0, Operand(temp1, vixl32::LSR, (mirror::kUseStringCompression ? 3 : 4)));
Artem Serov672b9c12017-12-05 18:04:07 +00001317 __ B((mirror::kUseStringCompression ? ls : le), end, /* far_target */ false);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001318
Anton Kirilov5ec62182016-10-13 20:16:02 +01001319 // Extract the characters and calculate the difference.
Anton Kirilov5ec62182016-10-13 20:16:02 +01001320 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001321 // For compressed strings we need to clear 0x7 from temp1, for uncompressed we need to clear
1322 // 0xf. We also need to prepare the character extraction mask `uncompressed ? 0xffffu : 0xffu`.
1323 // The compression flag is now in the highest bit of temp3, so let's play some tricks.
Anton Kirilovb88c4842016-11-14 14:37:00 +00001324 __ Orr(temp3, temp3, 0xffu << 23); // uncompressed ? 0xff800000u : 0x7ff80000u
1325 __ Bic(temp1, temp1, Operand(temp3, vixl32::LSR, 31 - 3)); // &= ~(uncompressed ? 0xfu : 0x7u)
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001326 __ Asr(temp3, temp3, 7u); // uncompressed ? 0xffff0000u : 0xff0000u.
1327 __ Lsr(temp2, temp2, temp1); // Extract second character.
1328 __ Lsr(temp3, temp3, 16u); // uncompressed ? 0xffffu : 0xffu
1329 __ Lsr(out, temp_reg, temp1); // Extract first character.
Anton Kirilovb88c4842016-11-14 14:37:00 +00001330 __ And(temp2, temp2, temp3);
1331 __ And(out, out, temp3);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001332 } else {
Anton Kirilovb88c4842016-11-14 14:37:00 +00001333 __ Bic(temp1, temp1, 0xf);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001334 __ Lsr(temp2, temp2, temp1);
1335 __ Lsr(out, temp_reg, temp1);
Anton Kirilovb88c4842016-11-14 14:37:00 +00001336 __ Movt(temp2, 0);
1337 __ Movt(out, 0);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001338 }
Anton Kirilov5ec62182016-10-13 20:16:02 +01001339
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001340 __ Sub(out, out, temp2);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001341 temps.Release(temp_reg);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001342
1343 if (mirror::kUseStringCompression) {
Artem Serov672b9c12017-12-05 18:04:07 +00001344 __ B(end);
1345 __ Bind(different_compression);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001346
1347 // Comparison for different compression style.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001348 const size_t c_char_size = DataType::Size(DataType::Type::kInt8);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001349 DCHECK_EQ(c_char_size, 1u);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001350
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001351 // We want to free up temp3, currently holding `str.count`, for comparison.
1352 // So, we move it to the bottom bit of the iteration count `temp0` which we then
1353 // need to treat as unsigned. Start by freeing the bit with an ADD and continue
1354 // further down by a LSRS+SBC which will flip the meaning of the flag but allow
1355 // `subs temp0, #2; bhi different_compression_loop` to serve as the loop condition.
Anton Kirilovb88c4842016-11-14 14:37:00 +00001356 __ Add(temp0, temp0, temp0); // Unlike LSL, this ADD is always 16-bit.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001357 // `temp1` will hold the compressed data pointer, `temp2` the uncompressed data pointer.
Anton Kirilovb88c4842016-11-14 14:37:00 +00001358 __ Mov(temp1, str);
1359 __ Mov(temp2, arg);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001360 __ Lsrs(temp3, temp3, 1u); // Continue the move of the compression flag.
1361 {
Artem Serov0fb37192016-12-06 18:13:40 +00001362 ExactAssemblyScope aas(assembler->GetVIXLAssembler(),
1363 3 * kMaxInstructionSizeInBytes,
1364 CodeBufferCheckScope::kMaximumSize);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001365 __ itt(cs); // Interleave with selection of temp1 and temp2.
1366 __ mov(cs, temp1, arg); // Preserves flags.
1367 __ mov(cs, temp2, str); // Preserves flags.
1368 }
Anton Kirilovb88c4842016-11-14 14:37:00 +00001369 __ Sbc(temp0, temp0, 0); // Complete the move of the compression flag.
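    // (SBC computes temp0 - 1 + carry, so bit 0 of `temp0` becomes the inverted
    // `str` compression flag that the LSRS above shifted into the carry flag.)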
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001370
1371 // Adjust temp1 and temp2 from string pointers to data pointers.
Anton Kirilovb88c4842016-11-14 14:37:00 +00001372 __ Add(temp1, temp1, value_offset);
1373 __ Add(temp2, temp2, value_offset);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001374
1375 vixl32::Label different_compression_loop;
1376 vixl32::Label different_compression_diff;
1377
1378 // Main loop for different compression.
Anton Kirilov5ec62182016-10-13 20:16:02 +01001379 temp_reg = temps.Acquire();
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001380 __ Bind(&different_compression_loop);
1381 __ Ldrb(temp_reg, MemOperand(temp1, c_char_size, PostIndex));
1382 __ Ldrh(temp3, MemOperand(temp2, char_size, PostIndex));
Anton Kirilovb88c4842016-11-14 14:37:00 +00001383 __ Cmp(temp_reg, temp3);
Artem Serov517d9f62016-12-12 15:51:15 +00001384 __ B(ne, &different_compression_diff, /* far_target */ false);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001385 __ Subs(temp0, temp0, 2);
Artem Serov517d9f62016-12-12 15:51:15 +00001386 __ B(hi, &different_compression_loop, /* far_target */ false);
Artem Serov672b9c12017-12-05 18:04:07 +00001387 __ B(end);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001388
1389 // Calculate the difference.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001390 __ Bind(&different_compression_diff);
1391 __ Sub(out, temp_reg, temp3);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001392 temps.Release(temp_reg);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001393 // Flip the difference if `arg` is compressed.
1394 // `temp0` contains the inverted `str` compression flag, i.e. the same as the `arg` compression flag.
1395 __ Lsrs(temp0, temp0, 1u);
1396 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1397 "Expecting 0=compressed, 1=uncompressed");
1398
Artem Serov0fb37192016-12-06 18:13:40 +00001399 ExactAssemblyScope aas(assembler->GetVIXLAssembler(),
1400 2 * kMaxInstructionSizeInBytes,
1401 CodeBufferCheckScope::kMaximumSize);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001402 __ it(cc);
1403 __ rsb(cc, out, out, 0);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001404 }
Anton Kirilov5ec62182016-10-13 20:16:02 +01001405}
1406
Vladimir Marko984519c2017-08-23 10:45:29 +01001407// The cut off for unrolling the loop in String.equals() intrinsic for const strings.
1408// The normal loop plus the pre-header is 9 instructions (18-26 bytes) without string compression
1409// and 12 instructions (24-32 bytes) with string compression. We can compare up to 4 bytes in 4
1410// instructions (LDR+LDR+CMP+BNE) and up to 8 bytes in 6 instructions (LDRD+LDRD+CMP+BNE+CMP+BNE).
1411// Allow up to 12 instructions (32 bytes) for the unrolled loop.
1412constexpr size_t kShortConstStringEqualsCutoffInBytes = 16;
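// For example, at the cutoff an uncompressed 8-character const string spans 16 bytes
// and compiles to two LDRD+LDRD+CMP+BNE+CMP+BNE rounds, i.e. exactly 12 instructions.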
1413
1414static const char* GetConstString(HInstruction* candidate, uint32_t* utf16_length) {
1415 if (candidate->IsLoadString()) {
1416 HLoadString* load_string = candidate->AsLoadString();
1417 const DexFile& dex_file = load_string->GetDexFile();
1418 return dex_file.StringDataAndUtf16LengthByIdx(load_string->GetStringIndex(), utf16_length);
1419 }
1420 return nullptr;
1421}
1422
Anton Kirilov5ec62182016-10-13 20:16:02 +01001423void IntrinsicLocationsBuilderARMVIXL::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoda283052017-11-07 21:17:24 +00001424 if (kEmitCompilerReadBarrier &&
1425 !StringEqualsOptimizations(invoke).GetArgumentIsString() &&
1426 !StringEqualsOptimizations(invoke).GetNoReadBarrierForStringClass()) {
1427 // No support for this odd case (String class is moveable, not in the boot image).
1428 return;
1429 }
1430
Vladimir Markoca6fff82017-10-03 14:49:14 +01001431 LocationSummary* locations =
1432 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001433 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1434 locations->SetInAt(0, Location::RequiresRegister());
1435 locations->SetInAt(1, Location::RequiresRegister());
Vladimir Marko984519c2017-08-23 10:45:29 +01001436
Anton Kirilov5ec62182016-10-13 20:16:02 +01001437 // Temporary registers to store lengths of strings and for calculations.
1438 // Using the cbz instruction requires a low register, so explicitly set a temp to be R0.
1439 locations->AddTemp(LocationFrom(r0));
Anton Kirilov5ec62182016-10-13 20:16:02 +01001440
Vladimir Marko984519c2017-08-23 10:45:29 +01001441 // For the generic implementation and for long const strings we need an extra temporary.
1442 // We do not need it for short const strings, up to 4 bytes, see code generation below.
1443 uint32_t const_string_length = 0u;
1444 const char* const_string = GetConstString(invoke->InputAt(0), &const_string_length);
1445 if (const_string == nullptr) {
1446 const_string = GetConstString(invoke->InputAt(1), &const_string_length);
1447 }
1448 bool is_compressed =
1449 mirror::kUseStringCompression &&
1450 const_string != nullptr &&
1451 mirror::String::DexFileStringAllASCII(const_string, const_string_length);
1452 if (const_string == nullptr || const_string_length > (is_compressed ? 4u : 2u)) {
1453 locations->AddTemp(Location::RequiresRegister());
1454 }
1455
1456 // TODO: If the String.equals() is used only for an immediately following HIf, we can
1457 // mark it as emitted-at-use-site and emit branches directly to the appropriate blocks.
1458 // Then we shall need an extra temporary register instead of the output register.
Anton Kirilov5ec62182016-10-13 20:16:02 +01001459 locations->SetOut(Location::RequiresRegister());
1460}
1461
1462void IntrinsicCodeGeneratorARMVIXL::VisitStringEquals(HInvoke* invoke) {
1463 ArmVIXLAssembler* assembler = GetAssembler();
1464 LocationSummary* locations = invoke->GetLocations();
1465
1466 vixl32::Register str = InputRegisterAt(invoke, 0);
1467 vixl32::Register arg = InputRegisterAt(invoke, 1);
1468 vixl32::Register out = OutputRegister(invoke);
1469
1470 vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
Anton Kirilov5ec62182016-10-13 20:16:02 +01001471
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001472 vixl32::Label loop;
Anton Kirilov5ec62182016-10-13 20:16:02 +01001473 vixl32::Label end;
1474 vixl32::Label return_true;
1475 vixl32::Label return_false;
Anton Kirilov6f644202017-02-27 18:29:45 +00001476 vixl32::Label* final_label = codegen_->GetFinalLabel(invoke, &end);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001477
1478 // Get offsets of count, value, and class fields within a string object.
1479 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1480 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1481 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1482
1483 // Note that the null check must have been done earlier.
1484 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1485
1486 StringEqualsOptimizations optimizations(invoke);
1487 if (!optimizations.GetArgumentNotNull()) {
1488 // Check if input is null, return false if it is.
xueliang.zhongf51bc622016-11-04 09:23:32 +00001489 __ CompareAndBranchIfZero(arg, &return_false, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001490 }
1491
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001492 // Reference equality check, return true if same reference.
1493 __ Cmp(str, arg);
Artem Serov517d9f62016-12-12 15:51:15 +00001494 __ B(eq, &return_true, /* far_target */ false);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001495
Anton Kirilov5ec62182016-10-13 20:16:02 +01001496 if (!optimizations.GetArgumentIsString()) {
1497 // Instanceof check for the argument by comparing class fields.
1498 // All string objects must have the same type since String cannot be subclassed.
1499 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1500 // If the argument is a string object, its class field must be equal to receiver's class field.
1501 __ Ldr(temp, MemOperand(str, class_offset));
Vladimir Marko984519c2017-08-23 10:45:29 +01001502 __ Ldr(out, MemOperand(arg, class_offset));
1503 __ Cmp(temp, out);
Artem Serov517d9f62016-12-12 15:51:15 +00001504 __ B(ne, &return_false, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001505 }
1506
Vladimir Marko984519c2017-08-23 10:45:29 +01001507 // Check if one of the inputs is a const string. Do not special-case both strings
1508 // being const, such cases should be handled by constant folding if needed.
1509 uint32_t const_string_length = 0u;
1510 const char* const_string = GetConstString(invoke->InputAt(0), &const_string_length);
1511 if (const_string == nullptr) {
1512 const_string = GetConstString(invoke->InputAt(1), &const_string_length);
1513 if (const_string != nullptr) {
1514 std::swap(str, arg); // Make sure the const string is in `str`.
1515 }
1516 }
1517 bool is_compressed =
1518 mirror::kUseStringCompression &&
1519 const_string != nullptr &&
1520 mirror::String::DexFileStringAllASCII(const_string, const_string_length);
1521
1522 if (const_string != nullptr) {
1523 // Load `count` field of the argument string and check if it matches the const string.
1524 // This also compares the compression style; if it differs, return false.
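    // (GetFlaggedCount folds the length and the compression flag into the same
    // encoding as the `count` field, so a single CMP checks both at once.)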
1525 __ Ldr(temp, MemOperand(arg, count_offset));
1526 __ Cmp(temp, Operand(mirror::String::GetFlaggedCount(const_string_length, is_compressed)));
1527 __ B(ne, &return_false, /* far_target */ false);
1528 } else {
1529 // Load `count` fields of this and argument strings.
1530 __ Ldr(temp, MemOperand(str, count_offset));
1531 __ Ldr(out, MemOperand(arg, count_offset));
1532 // Check if `count` fields are equal, return false if they're not.
1533 // This also compares the compression style; if it differs, return false.
1534 __ Cmp(temp, out);
1535 __ B(ne, &return_false, /* far_target */ false);
1536 }
Anton Kirilov5ec62182016-10-13 20:16:02 +01001537
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001538 // Assertions that must hold in order to compare strings 4 bytes at a time.
Vladimir Marko984519c2017-08-23 10:45:29 +01001539 // Ok to do this because strings are zero-padded to kObjectAlignment.
Anton Kirilov5ec62182016-10-13 20:16:02 +01001540 DCHECK_ALIGNED(value_offset, 4);
1541 static_assert(IsAligned<4>(kObjectAlignment), "String data must be aligned for fast compare.");
1542
Vladimir Marko984519c2017-08-23 10:45:29 +01001543 if (const_string != nullptr &&
1544 const_string_length <= (is_compressed ? kShortConstStringEqualsCutoffInBytes
1545 : kShortConstStringEqualsCutoffInBytes / 2u)) {
1546 // Load and compare the contents. Though we know the contents of the short const string
1547 // at compile time, materializing constants may be more code than loading from memory.
1548 int32_t offset = value_offset;
1549 size_t remaining_bytes =
1550 RoundUp(is_compressed ? const_string_length : const_string_length * 2u, 4u);
1551 while (remaining_bytes > sizeof(uint32_t)) {
1552 vixl32::Register temp1 = RegisterFrom(locations->GetTemp(1));
1553 UseScratchRegisterScope scratch_scope(assembler->GetVIXLAssembler());
1554 vixl32::Register temp2 = scratch_scope.Acquire();
1555 __ Ldrd(temp, temp1, MemOperand(str, offset));
1556 __ Ldrd(temp2, out, MemOperand(arg, offset));
1557 __ Cmp(temp, temp2);
1558 __ B(ne, &return_false, /* far_target */ false);
1559 __ Cmp(temp1, out);
1560 __ B(ne, &return_false, /* far_target */ false);
1561 offset += 2u * sizeof(uint32_t);
1562 remaining_bytes -= 2u * sizeof(uint32_t);
1563 }
1564 if (remaining_bytes != 0u) {
1565 __ Ldr(temp, MemOperand(str, offset));
1566 __ Ldr(out, MemOperand(arg, offset));
1567 __ Cmp(temp, out);
1568 __ B(ne, &return_false, /* far_target */ false);
1569 }
1570 } else {
1571 // Return true if both strings are empty. Even with string compression `count == 0` means empty.
1572 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1573 "Expecting 0=compressed, 1=uncompressed");
1574 __ CompareAndBranchIfZero(temp, &return_true, /* far_target */ false);
1575
1576 if (mirror::kUseStringCompression) {
1577 // For string compression, calculate the number of bytes to compare (not chars).
1578 // This could in theory exceed INT32_MAX, so treat temp as unsigned.
1579 __ Lsrs(temp, temp, 1u); // Extract length and check compression flag.
1580 ExactAssemblyScope aas(assembler->GetVIXLAssembler(),
1581 2 * kMaxInstructionSizeInBytes,
1582 CodeBufferCheckScope::kMaximumSize);
1583 __ it(cs); // If uncompressed,
1584 __ add(cs, temp, temp, temp); // double the byte count.
1585 }
1586
1587 vixl32::Register temp1 = RegisterFrom(locations->GetTemp(1));
1588 UseScratchRegisterScope scratch_scope(assembler->GetVIXLAssembler());
1589 vixl32::Register temp2 = scratch_scope.Acquire();
1590
1591 // Store offset of string value in preparation for comparison loop.
1592 __ Mov(temp1, value_offset);
1593
1594 // Loop to compare strings 4 bytes at a time starting at the front of the string.
1595 __ Bind(&loop);
1596 __ Ldr(out, MemOperand(str, temp1));
1597 __ Ldr(temp2, MemOperand(arg, temp1));
1598 __ Add(temp1, temp1, Operand::From(sizeof(uint32_t)));
1599 __ Cmp(out, temp2);
1600 __ B(ne, &return_false, /* far_target */ false);
1601 // With string compression, we have compared 4 bytes, otherwise 2 chars.
1602 __ Subs(temp, temp, mirror::kUseStringCompression ? 4 : 2);
1603 __ B(hi, &loop, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001604 }
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001605
Anton Kirilov5ec62182016-10-13 20:16:02 +01001606 // Return true and exit the function.
1607 // If loop does not result in returning false, we return true.
1608 __ Bind(&return_true);
1609 __ Mov(out, 1);
Anton Kirilov6f644202017-02-27 18:29:45 +00001610 __ B(final_label);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001611
1612 // Return false and exit the function.
1613 __ Bind(&return_false);
1614 __ Mov(out, 0);
Anton Kirilov6f644202017-02-27 18:29:45 +00001615
1616 if (end.IsReferenced()) {
1617 __ Bind(&end);
1618 }
Anton Kirilov5ec62182016-10-13 20:16:02 +01001619}
1620
1621static void GenerateVisitStringIndexOf(HInvoke* invoke,
1622 ArmVIXLAssembler* assembler,
1623 CodeGeneratorARMVIXL* codegen,
Anton Kirilov5ec62182016-10-13 20:16:02 +01001624 bool start_at_zero) {
1625 LocationSummary* locations = invoke->GetLocations();
1626
1627 // Note that the null check must have been done earlier.
1628 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1629
1630 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1631 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
1632 SlowPathCodeARMVIXL* slow_path = nullptr;
1633 HInstruction* code_point = invoke->InputAt(1);
1634 if (code_point->IsIntConstant()) {
Anton Kirilov644032c2016-12-06 17:51:43 +00001635 if (static_cast<uint32_t>(Int32ConstantFrom(code_point)) >
Anton Kirilov5ec62182016-10-13 20:16:02 +01001636 std::numeric_limits<uint16_t>::max()) {
1637 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1638 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
Vladimir Marko174b2e22017-10-12 13:34:49 +01001639 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001640 codegen->AddSlowPath(slow_path);
1641 __ B(slow_path->GetEntryLabel());
1642 __ Bind(slow_path->GetExitLabel());
1643 return;
1644 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001645 } else if (code_point->GetType() != DataType::Type::kUint16) {
Anton Kirilov5ec62182016-10-13 20:16:02 +01001646 vixl32::Register char_reg = InputRegisterAt(invoke, 1);
1647 // 0xffff is not a modified immediate but 0x10000 is, so use `>= 0x10000` instead of `> 0xffff`.
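  // (Thumb-2 modified immediates are an 8-bit value rotated or replicated, which
  // covers 0x10000 = 1 << 16 but not 0xffff.)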
1648 __ Cmp(char_reg, static_cast<uint32_t>(std::numeric_limits<uint16_t>::max()) + 1);
Vladimir Marko174b2e22017-10-12 13:34:49 +01001649 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001650 codegen->AddSlowPath(slow_path);
1651 __ B(hs, slow_path->GetEntryLabel());
1652 }
1653
1654 if (start_at_zero) {
1655 vixl32::Register tmp_reg = RegisterFrom(locations->GetTemp(0));
1656 DCHECK(tmp_reg.Is(r2));
1657 // Start-index = 0.
1658 __ Mov(tmp_reg, 0);
1659 }
1660
1661 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
1662 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
1663
1664 if (slow_path != nullptr) {
1665 __ Bind(slow_path->GetExitLabel());
1666 }
1667}
1668
1669void IntrinsicLocationsBuilderARMVIXL::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001670 LocationSummary* locations = new (allocator_) LocationSummary(
1671 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001672 // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1673 // best to align the inputs accordingly.
1674 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1675 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1676 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1677 locations->SetOut(LocationFrom(r0));
1678
1679 // Need to send start-index=0.
1680 locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
1681}
1682
1683void IntrinsicCodeGeneratorARMVIXL::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001684 GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ true);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001685}
1686
1687void IntrinsicLocationsBuilderARMVIXL::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001688 LocationSummary* locations = new (allocator_) LocationSummary(
1689 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001690 // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1691 // best to align the inputs accordingly.
1692 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1693 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1694 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1695 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1696 locations->SetOut(LocationFrom(r0));
1697}
1698
1699void IntrinsicCodeGeneratorARMVIXL::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001700 GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, /* start_at_zero */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001701}
1702
1703void IntrinsicLocationsBuilderARMVIXL::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001704 LocationSummary* locations = new (allocator_) LocationSummary(
1705 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001706 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1707 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1708 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1709 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1710 locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
1711 locations->SetOut(LocationFrom(r0));
1712}
1713
1714void IntrinsicCodeGeneratorARMVIXL::VisitStringNewStringFromBytes(HInvoke* invoke) {
1715 ArmVIXLAssembler* assembler = GetAssembler();
1716 vixl32::Register byte_array = InputRegisterAt(invoke, 0);
1717 __ Cmp(byte_array, 0);
Vladimir Marko174b2e22017-10-12 13:34:49 +01001718 SlowPathCodeARMVIXL* slow_path =
1719 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001720 codegen_->AddSlowPath(slow_path);
1721 __ B(eq, slow_path->GetEntryLabel());
1722
1723 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
1724 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
1725 __ Bind(slow_path->GetExitLabel());
1726}
1727
1728void IntrinsicLocationsBuilderARMVIXL::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001729 LocationSummary* locations =
1730 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001731 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1732 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1733 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1734 locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
1735 locations->SetOut(LocationFrom(r0));
1736}
1737
1738void IntrinsicCodeGeneratorARMVIXL::VisitStringNewStringFromChars(HInvoke* invoke) {
1739 // No need to emit code checking whether `locations->InAt(2)` is a null
1740 // pointer, as callers of the native method
1741 //
1742 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1743 //
1744 // all include a null check on `data` before calling that method.
1745 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
1746 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
1747}
1748
1749void IntrinsicLocationsBuilderARMVIXL::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001750 LocationSummary* locations = new (allocator_) LocationSummary(
1751 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001752 InvokeRuntimeCallingConventionARMVIXL calling_convention;
1753 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1754 locations->SetOut(LocationFrom(r0));
1755}
1756
1757void IntrinsicCodeGeneratorARMVIXL::VisitStringNewStringFromString(HInvoke* invoke) {
1758 ArmVIXLAssembler* assembler = GetAssembler();
1759 vixl32::Register string_to_copy = InputRegisterAt(invoke, 0);
1760 __ Cmp(string_to_copy, 0);
Vladimir Marko174b2e22017-10-12 13:34:49 +01001761 SlowPathCodeARMVIXL* slow_path =
1762 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001763 codegen_->AddSlowPath(slow_path);
1764 __ B(eq, slow_path->GetEntryLabel());
1765
1766 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
1767 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
1768
1769 __ Bind(slow_path->GetExitLabel());
1770}
1771
1772void IntrinsicLocationsBuilderARMVIXL::VisitSystemArrayCopy(HInvoke* invoke) {
1773 // The only read barrier implementation supporting the
1774 // SystemArrayCopy intrinsic is the Baker-style read barrier.
1775 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1776 return;
1777 }
1778
1779 CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
1780 LocationSummary* locations = invoke->GetLocations();
1781 if (locations == nullptr) {
1782 return;
1783 }
1784
1785 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
1786 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
1787 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
1788
1789 if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
1790 locations->SetInAt(1, Location::RequiresRegister());
1791 }
1792 if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
1793 locations->SetInAt(3, Location::RequiresRegister());
1794 }
1795 if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
1796 locations->SetInAt(4, Location::RequiresRegister());
1797 }
1798 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1799 // Temporary register IP cannot be used in
1800 // ReadBarrierSystemArrayCopySlowPathARM (because that register
1801 // is clobbered by ReadBarrierMarkRegX entry points). Get an extra
1802 // temporary register from the register allocator.
1803 locations->AddTemp(Location::RequiresRegister());
1804 }
1805}
1806
1807static void CheckPosition(ArmVIXLAssembler* assembler,
1808 Location pos,
1809 vixl32::Register input,
1810 Location length,
1811 SlowPathCodeARMVIXL* slow_path,
1812 vixl32::Register temp,
1813 bool length_is_input_length = false) {
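  // Roughly: bail to `slow_path` unless 0 <= pos && pos <= input->length &&
  // input->length - pos >= length, with cheaper checks emitted for the constant
  // and the length_is_input_length special cases.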
1814 // Where is the length in the Array?
1815 const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();
1816
1817 if (pos.IsConstant()) {
1818 int32_t pos_const = Int32ConstantFrom(pos);
1819 if (pos_const == 0) {
1820 if (!length_is_input_length) {
1821 // Check that length(input) >= length.
1822 __ Ldr(temp, MemOperand(input, length_offset));
1823 if (length.IsConstant()) {
1824 __ Cmp(temp, Int32ConstantFrom(length));
1825 } else {
1826 __ Cmp(temp, RegisterFrom(length));
1827 }
1828 __ B(lt, slow_path->GetEntryLabel());
1829 }
1830 } else {
1831 // Check that length(input) >= pos.
1832 __ Ldr(temp, MemOperand(input, length_offset));
1833 __ Subs(temp, temp, pos_const);
1834 __ B(lt, slow_path->GetEntryLabel());
1835
1836 // Check that (length(input) - pos) >= length.
1837 if (length.IsConstant()) {
1838 __ Cmp(temp, Int32ConstantFrom(length));
1839 } else {
1840 __ Cmp(temp, RegisterFrom(length));
1841 }
1842 __ B(lt, slow_path->GetEntryLabel());
1843 }
1844 } else if (length_is_input_length) {
1845 // The only way the copy can succeed is if pos is zero.
1846 vixl32::Register pos_reg = RegisterFrom(pos);
xueliang.zhongf51bc622016-11-04 09:23:32 +00001847 __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01001848 } else {
1849 // Check that pos >= 0.
1850 vixl32::Register pos_reg = RegisterFrom(pos);
1851 __ Cmp(pos_reg, 0);
1852 __ B(lt, slow_path->GetEntryLabel());
1853
1854 // Check that pos <= length(input).
1855 __ Ldr(temp, MemOperand(input, length_offset));
1856 __ Subs(temp, temp, pos_reg);
1857 __ B(lt, slow_path->GetEntryLabel());
1858
1859 // Check that (length(input) - pos) >= length.
1860 if (length.IsConstant()) {
1861 __ Cmp(temp, Int32ConstantFrom(length));
1862 } else {
1863 __ Cmp(temp, RegisterFrom(length));
1864 }
1865 __ B(lt, slow_path->GetEntryLabel());
1866 }
1867}
1868
1869void IntrinsicCodeGeneratorARMVIXL::VisitSystemArrayCopy(HInvoke* invoke) {
1870 // The only read barrier implementation supporting the
1871 // SystemArrayCopy intrinsic is the Baker-style read barrier.
1872 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1873
1874 ArmVIXLAssembler* assembler = GetAssembler();
1875 LocationSummary* locations = invoke->GetLocations();
1876
1877 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1878 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
1879 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
1880 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
1881 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
1882
1883 vixl32::Register src = InputRegisterAt(invoke, 0);
1884 Location src_pos = locations->InAt(1);
1885 vixl32::Register dest = InputRegisterAt(invoke, 2);
1886 Location dest_pos = locations->InAt(3);
1887 Location length = locations->InAt(4);
1888 Location temp1_loc = locations->GetTemp(0);
1889 vixl32::Register temp1 = RegisterFrom(temp1_loc);
1890 Location temp2_loc = locations->GetTemp(1);
1891 vixl32::Register temp2 = RegisterFrom(temp2_loc);
1892 Location temp3_loc = locations->GetTemp(2);
1893 vixl32::Register temp3 = RegisterFrom(temp3_loc);
1894
Vladimir Marko174b2e22017-10-12 13:34:49 +01001895 SlowPathCodeARMVIXL* intrinsic_slow_path =
1896 new (codegen_->GetScopedAllocator()) IntrinsicSlowPathARMVIXL(invoke);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001897 codegen_->AddSlowPath(intrinsic_slow_path);
1898
1899 vixl32::Label conditions_on_positions_validated;
1900 SystemArrayCopyOptimizations optimizations(invoke);
1901
1902 // If source and destination are the same, we go to the slow path when dest_pos > src_pos,
1903 // since the fast-path code below copies forward and would corrupt such an overlapping range.
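  // Roughly: if (src == dest && src_pos < dest_pos) goto intrinsic_slow_path;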
1904 if (src_pos.IsConstant()) {
1905 int32_t src_pos_constant = Int32ConstantFrom(src_pos);
1906 if (dest_pos.IsConstant()) {
1907 int32_t dest_pos_constant = Int32ConstantFrom(dest_pos);
1908 if (optimizations.GetDestinationIsSource()) {
1909 // Checked when building locations.
1910 DCHECK_GE(src_pos_constant, dest_pos_constant);
1911 } else if (src_pos_constant < dest_pos_constant) {
1912 __ Cmp(src, dest);
1913 __ B(eq, intrinsic_slow_path->GetEntryLabel());
1914 }
1915
1916 // Checked when building locations.
1917 DCHECK(!optimizations.GetDestinationIsSource()
1918 || (src_pos_constant >= Int32ConstantFrom(dest_pos)));
1919 } else {
1920 if (!optimizations.GetDestinationIsSource()) {
1921 __ Cmp(src, dest);
Artem Serov517d9f62016-12-12 15:51:15 +00001922 __ B(ne, &conditions_on_positions_validated, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001923 }
1924 __ Cmp(RegisterFrom(dest_pos), src_pos_constant);
1925 __ B(gt, intrinsic_slow_path->GetEntryLabel());
1926 }
1927 } else {
1928 if (!optimizations.GetDestinationIsSource()) {
1929 __ Cmp(src, dest);
Artem Serov517d9f62016-12-12 15:51:15 +00001930 __ B(ne, &conditions_on_positions_validated, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01001931 }
1932 if (dest_pos.IsConstant()) {
1933 int32_t dest_pos_constant = Int32ConstantFrom(dest_pos);
1934 __ Cmp(RegisterFrom(src_pos), dest_pos_constant);
1935 } else {
1936 __ Cmp(RegisterFrom(src_pos), RegisterFrom(dest_pos));
1937 }
1938 __ B(lt, intrinsic_slow_path->GetEntryLabel());
1939 }
1940
1941 __ Bind(&conditions_on_positions_validated);
1942
1943 if (!optimizations.GetSourceIsNotNull()) {
1944 // Bail out if the source is null.
xueliang.zhongf51bc622016-11-04 09:23:32 +00001945 __ CompareAndBranchIfZero(src, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01001946 }
1947
1948 if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
1949 // Bail out if the destination is null.
xueliang.zhongf51bc622016-11-04 09:23:32 +00001950 __ CompareAndBranchIfZero(dest, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01001951 }
1952
1953 // If the length is negative, bail out.
1954 // We have already checked in the LocationsBuilder for the constant case.
1955 if (!length.IsConstant() &&
1956 !optimizations.GetCountIsSourceLength() &&
1957 !optimizations.GetCountIsDestinationLength()) {
1958 __ Cmp(RegisterFrom(length), 0);
1959 __ B(lt, intrinsic_slow_path->GetEntryLabel());
1960 }
1961
1962 // Validity checks: source.
1963 CheckPosition(assembler,
1964 src_pos,
1965 src,
1966 length,
1967 intrinsic_slow_path,
1968 temp1,
1969 optimizations.GetCountIsSourceLength());
1970
1971 // Validity checks: dest.
1972 CheckPosition(assembler,
1973 dest_pos,
1974 dest,
1975 length,
1976 intrinsic_slow_path,
1977 temp1,
1978 optimizations.GetCountIsDestinationLength());
1979
1980 if (!optimizations.GetDoesNotNeedTypeCheck()) {
1981 // Check whether all elements of the source array are assignable to the component
1982 // type of the destination array. We do two checks: the classes are the same,
1983 // or the destination is Object[]. If none of these checks succeed, we go to the
1984 // slow path.
1985
1986 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1987 if (!optimizations.GetSourceIsNonPrimitiveArray()) {
1988 // /* HeapReference<Class> */ temp1 = src->klass_
1989 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1990 invoke, temp1_loc, src, class_offset, temp2_loc, /* needs_null_check */ false);
1991 // Bail out if the source is not a non-primitive array.
1992 // /* HeapReference<Class> */ temp1 = temp1->component_type_
1993 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1994 invoke, temp1_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
xueliang.zhongf51bc622016-11-04 09:23:32 +00001995 __ CompareAndBranchIfZero(temp1, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01001996 // If heap poisoning is enabled, `temp1` has been unpoisoned
1997 // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
1998 // /* uint16_t */ temp1 = static_cast<uint16_t>(temp1->primitive_type_);
1999 __ Ldrh(temp1, MemOperand(temp1, primitive_offset));
2000 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
xueliang.zhongf51bc622016-11-04 09:23:32 +00002001 __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002002 }
2003
2004 // /* HeapReference<Class> */ temp1 = dest->klass_
2005 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2006 invoke, temp1_loc, dest, class_offset, temp2_loc, /* needs_null_check */ false);
2007
2008 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
2009 // Bail out if the destination is not a non-primitive array.
2010 //
2011 // Register `temp1` is not trashed by the read barrier emitted
2012 // by GenerateFieldLoadWithBakerReadBarrier below, as that
2013 // method produces a call to a ReadBarrierMarkRegX entry point,
2014 // which saves all potentially live registers, including
2015 // temporaries such as `temp1`.
2016 // /* HeapReference<Class> */ temp2 = temp1->component_type_
2017 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2018 invoke, temp2_loc, temp1, component_offset, temp3_loc, /* needs_null_check */ false);
xueliang.zhongf51bc622016-11-04 09:23:32 +00002019 __ CompareAndBranchIfZero(temp2, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002020 // If heap poisoning is enabled, `temp2` has been unpoisoned
2021 // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2022 // /* uint16_t */ temp2 = static_cast<uint16_t>(temp2->primitive_type_);
2023 __ Ldrh(temp2, MemOperand(temp2, primitive_offset));
2024 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
xueliang.zhongf51bc622016-11-04 09:23:32 +00002025 __ CompareAndBranchIfNonZero(temp2, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002026 }
2027
2028 // For the same reason given earlier, `temp1` is not trashed by the
2029 // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
2030 // /* HeapReference<Class> */ temp2 = src->klass_
2031 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2032 invoke, temp2_loc, src, class_offset, temp3_loc, /* needs_null_check */ false);
2033 // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
2034 __ Cmp(temp1, temp2);
2035
2036 if (optimizations.GetDestinationIsTypedObjectArray()) {
2037 vixl32::Label do_copy;
Artem Serov517d9f62016-12-12 15:51:15 +00002038 __ B(eq, &do_copy, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01002039 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2040 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2041 invoke, temp1_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
2042 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2043 // We do not need to emit a read barrier for the following
2044 // heap reference load, as `temp1` is only used in a
2045 // comparison with null below, and this reference is not
2046 // kept afterwards.
2047 __ Ldr(temp1, MemOperand(temp1, super_offset));
xueliang.zhongf51bc622016-11-04 09:23:32 +00002048 __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002049 __ Bind(&do_copy);
2050 } else {
2051 __ B(ne, intrinsic_slow_path->GetEntryLabel());
2052 }
2053 } else {
2054 // Non read barrier code.
2055
2056 // /* HeapReference<Class> */ temp1 = dest->klass_
2057 __ Ldr(temp1, MemOperand(dest, class_offset));
2058 // /* HeapReference<Class> */ temp2 = src->klass_
2059 __ Ldr(temp2, MemOperand(src, class_offset));
2060 bool did_unpoison = false;
2061 if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
2062 !optimizations.GetSourceIsNonPrimitiveArray()) {
2063 // One or two of the references need to be unpoisoned. Unpoison them
2064 // both to make the identity check valid.
2065 assembler->MaybeUnpoisonHeapReference(temp1);
2066 assembler->MaybeUnpoisonHeapReference(temp2);
2067 did_unpoison = true;
2068 }
2069
2070 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
2071 // Bail out if the destination is not a non-primitive array.
2072 // /* HeapReference<Class> */ temp3 = temp1->component_type_
2073 __ Ldr(temp3, MemOperand(temp1, component_offset));
xueliang.zhongf51bc622016-11-04 09:23:32 +00002074 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002075 assembler->MaybeUnpoisonHeapReference(temp3);
2076 // /* uint16_t */ temp3 = static_cast<uint16_t>(temp3->primitive_type_);
2077 __ Ldrh(temp3, MemOperand(temp3, primitive_offset));
2078 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
xueliang.zhongf51bc622016-11-04 09:23:32 +00002079 __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002080 }
2081
2082 if (!optimizations.GetSourceIsNonPrimitiveArray()) {
2083 // Bail out if the source is not a non-primitive array.
2084 // /* HeapReference<Class> */ temp3 = temp2->component_type_
2085 __ Ldr(temp3, MemOperand(temp2, component_offset));
xueliang.zhongf51bc622016-11-04 09:23:32 +00002086 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002087 assembler->MaybeUnpoisonHeapReference(temp3);
2088 // /* uint16_t */ temp3 = static_cast<uint16_t>(temp3->primitive_type_);
2089 __ Ldrh(temp3, MemOperand(temp3, primitive_offset));
2090 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
xueliang.zhongf51bc622016-11-04 09:23:32 +00002091 __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002092 }
2093
2094 __ Cmp(temp1, temp2);
2095
2096 if (optimizations.GetDestinationIsTypedObjectArray()) {
2097 vixl32::Label do_copy;
Artem Serov517d9f62016-12-12 15:51:15 +00002098 __ B(eq, &do_copy, /* far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01002099 if (!did_unpoison) {
2100 assembler->MaybeUnpoisonHeapReference(temp1);
2101 }
2102 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2103 __ Ldr(temp1, MemOperand(temp1, component_offset));
2104 assembler->MaybeUnpoisonHeapReference(temp1);
2105 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2106 __ Ldr(temp1, MemOperand(temp1, super_offset));
2107 // No need to unpoison the result, we're comparing against null.
xueliang.zhongf51bc622016-11-04 09:23:32 +00002108 __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002109 __ Bind(&do_copy);
2110 } else {
2111 __ B(ne, intrinsic_slow_path->GetEntryLabel());
2112 }
2113 }
2114 } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
2115 DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
2116 // Bail out if the source is not a non-primitive array.
2117 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2118 // /* HeapReference<Class> */ temp1 = src->klass_
2119 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2120 invoke, temp1_loc, src, class_offset, temp2_loc, /* needs_null_check */ false);
2121 // /* HeapReference<Class> */ temp3 = temp1->component_type_
2122 codegen_->GenerateFieldLoadWithBakerReadBarrier(
2123 invoke, temp3_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
xueliang.zhongf51bc622016-11-04 09:23:32 +00002124 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002125 // If heap poisoning is enabled, `temp3` has been unpoisoned
2126 // by the the previous call to GenerateFieldLoadWithBakerReadBarrier.
2127 } else {
2128 // /* HeapReference<Class> */ temp1 = src->klass_
2129 __ Ldr(temp1, MemOperand(src, class_offset));
2130 assembler->MaybeUnpoisonHeapReference(temp1);
2131 // /* HeapReference<Class> */ temp3 = temp1->component_type_
2132 __ Ldr(temp3, MemOperand(temp1, component_offset));
xueliang.zhongf51bc622016-11-04 09:23:32 +00002133 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002134 assembler->MaybeUnpoisonHeapReference(temp3);
2135 }
2136 // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
2137 __ Ldrh(temp3, MemOperand(temp3, primitive_offset));
2138 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
xueliang.zhongf51bc622016-11-04 09:23:32 +00002139 __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
Anton Kirilov5ec62182016-10-13 20:16:02 +01002140 }
2141
Roland Levillain1663d162017-03-17 15:15:21 +00002142 if (length.IsConstant() && Int32ConstantFrom(length) == 0) {
2143 // Null constant length: not need to emit the loop code at all.
Anton Kirilov5ec62182016-10-13 20:16:02 +01002144 } else {
Roland Levillain1663d162017-03-17 15:15:21 +00002145 vixl32::Label done;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002146 const DataType::Type type = DataType::Type::kReference;
2147 const int32_t element_size = DataType::Size(type);
Roland Levillain1663d162017-03-17 15:15:21 +00002148
2149 if (length.IsRegister()) {
2150 // Don't enter the copy loop if the length is null.
2151 __ CompareAndBranchIfZero(RegisterFrom(length), &done, /* is_far_target */ false);
Anton Kirilov5ec62182016-10-13 20:16:02 +01002152 }
Roland Levillain1663d162017-03-17 15:15:21 +00002153
2154 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2155 // TODO: Also convert this intrinsic to the IsGcMarking strategy?
2156
2157 // SystemArrayCopy implementation for Baker read barriers (see
Roland Levillain9983e302017-07-14 14:34:22 +01002158 // also CodeGeneratorARMVIXL::GenerateReferenceLoadWithBakerReadBarrier):
Roland Levillain1663d162017-03-17 15:15:21 +00002159 //
2160 // uint32_t rb_state = Lockword(src->monitor_).ReadBarrierState();
2161 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
2162 // bool is_gray = (rb_state == ReadBarrier::GrayState());
2163 // if (is_gray) {
2164 // // Slow-path copy.
2165 // do {
2166 // *dest_ptr++ = MaybePoison(ReadBarrier::Mark(MaybeUnpoison(*src_ptr++)));
2167 // } while (src_ptr != end_ptr)
2168 // } else {
2169 // // Fast-path copy.
2170 // do {
2171 // *dest_ptr++ = *src_ptr++;
2172 // } while (src_ptr != end_ptr)
2173 // }
2174
2175 // /* int32_t */ monitor = src->monitor_
2176 __ Ldr(temp2, MemOperand(src, monitor_offset));
2177 // /* LockWord */ lock_word = LockWord(monitor)
2178 static_assert(sizeof(LockWord) == sizeof(int32_t),
2179 "art::LockWord and int32_t have different sizes.");
2180
2181 // Introduce a dependency on the lock_word including the rb_state,
2182 // which shall prevent load-load reordering without using
2183 // a memory barrier (which would be more expensive).
2184 // `src` is unchanged by this operation, but its value now depends
2185 // on `temp2`.
2186 __ Add(src, src, Operand(temp2, vixl32::LSR, 32));
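      // (An immediate LSR by #32 of a 32-bit register yields 0, so the Add
      // contributes zero to `src` while still making its value data-dependent
      // on `temp2`.)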

      // Compute the base source address in `temp1`.
      // Note that `temp1` (the base source address) is computed from
      // `src` (and `src_pos`) here, and thus honors the artificial
      // dependency of `src` on `temp2`.
      GenSystemArrayCopyBaseAddress(GetAssembler(), type, src, src_pos, temp1);
      // Compute the end source address in `temp3`.
      GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3);
      // The base destination address is computed later, as `temp2` is
      // used for intermediate computations.

      // Slow path used to copy array when `src` is gray.
      // Note that the base destination address is computed in `temp2`
      // by the slow path code.
      SlowPathCodeARMVIXL* read_barrier_slow_path =
          new (codegen_->GetScopedAllocator()) ReadBarrierSystemArrayCopySlowPathARMVIXL(invoke);
      codegen_->AddSlowPath(read_barrier_slow_path);

      // Given the numeric representation, it's enough to check the low bit of the
      // rb_state. We do that by shifting the bit out of the lock word with LSRS
      // which can be a 16-bit instruction unlike the TST immediate.
      static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
      static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
      __ Lsrs(temp2, temp2, LockWord::kReadBarrierStateShift + 1);
      // Carry flag is the last bit shifted out by LSRS.
      __ B(cs, read_barrier_slow_path->GetEntryLabel());

      // Fast-path copy.
      // Compute the base destination address in `temp2`.
      GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2);
      // Iterate over the arrays and do a raw copy of the objects. We don't need to
      // poison/unpoison.
      vixl32::Label loop;
      __ Bind(&loop);
      {
        UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
        const vixl32::Register temp_reg = temps.Acquire();
        __ Ldr(temp_reg, MemOperand(temp1, element_size, PostIndex));
        __ Str(temp_reg, MemOperand(temp2, element_size, PostIndex));
      }
      __ Cmp(temp1, temp3);
      __ B(ne, &loop, /* far_target */ false);

      __ Bind(read_barrier_slow_path->GetExitLabel());
    } else {
      // Non read barrier code.
      // Compute the base source address in `temp1`.
      GenSystemArrayCopyBaseAddress(GetAssembler(), type, src, src_pos, temp1);
      // Compute the base destination address in `temp2`.
      GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2);
      // Compute the end source address in `temp3`.
      GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3);
      // Iterate over the arrays and do a raw copy of the objects. We don't need to
      // poison/unpoison.
      vixl32::Label loop;
      __ Bind(&loop);
      {
        UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
        const vixl32::Register temp_reg = temps.Acquire();
        __ Ldr(temp_reg, MemOperand(temp1, element_size, PostIndex));
        __ Str(temp_reg, MemOperand(temp2, element_size, PostIndex));
      }
      __ Cmp(temp1, temp3);
      __ B(ne, &loop, /* far_target */ false);
    }
    __ Bind(&done);
  }

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1, temp2, dest, NoReg, /* value_can_be_null */ false);

  __ Bind(intrinsic_slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), DataType::Type::kFloat64);
  DCHECK_EQ(invoke->GetType(), DataType::Type::kFloat64);

  LocationSummary* const locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  const InvokeRuntimeCallingConventionARMVIXL calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), DataType::Type::kFloat64);
  DCHECK_EQ(invoke->InputAt(1)->GetType(), DataType::Type::kFloat64);
  DCHECK_EQ(invoke->GetType(), DataType::Type::kFloat64);

  LocationSummary* const locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
  const InvokeRuntimeCallingConventionARMVIXL calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(3)));
}

static void GenFPToFPCall(HInvoke* invoke,
                          ArmVIXLAssembler* assembler,
                          CodeGeneratorARMVIXL* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(locations->WillCall() && locations->Intrinsified());

  // Native code uses the soft float ABI.
  __ Vmov(RegisterFrom(locations->GetTemp(0)),
          RegisterFrom(locations->GetTemp(1)),
          InputDRegisterAt(invoke, 0));
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
  __ Vmov(OutputDRegister(invoke),
          RegisterFrom(locations->GetTemp(0)),
          RegisterFrom(locations->GetTemp(1)));
}
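
// A rough sketch (an illustration, not the exact code the runtime call expands
// to) of the sequence GenFPToFPCall above produces for, say, Math.cos():
//
//   vmov r0, r1, d0          ; pass the double argument per the soft-float ABI
//   <call the kQuickCos entrypoint>
//   vmov d0, r0, r1          ; move the double result back into a D register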

static void GenFPFPToFPCall(HInvoke* invoke,
                            ArmVIXLAssembler* assembler,
                            CodeGeneratorARMVIXL* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(locations->WillCall() && locations->Intrinsified());

  // Native code uses the soft float ABI.
  __ Vmov(RegisterFrom(locations->GetTemp(0)),
          RegisterFrom(locations->GetTemp(1)),
          InputDRegisterAt(invoke, 0));
  __ Vmov(RegisterFrom(locations->GetTemp(2)),
          RegisterFrom(locations->GetTemp(3)),
          InputDRegisterAt(invoke, 1));
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
  __ Vmov(OutputDRegister(invoke),
          RegisterFrom(locations->GetTemp(0)),
          RegisterFrom(locations->GetTemp(1)));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathPow(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathPow(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickPow);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerReverse(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Rbit(OutputRegister(invoke), InputRegisterAt(invoke, 0));
}

void IntrinsicLocationsBuilderARMVIXL::VisitLongReverse(HInvoke* invoke) {
  CreateLongToLongLocationsWithOverlap(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitLongReverse(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
  vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
  vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
  vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());

  __ Rbit(out_reg_lo, in_reg_hi);
  __ Rbit(out_reg_hi, in_reg_lo);
}
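
// A worked example for the long reversal above: input 0x0000000000000001
// (lo = 1, hi = 0) must become 0x8000000000000000, and indeed Rbit(lo) ==
// 0x80000000 lands in the new high word while Rbit(hi) == 0 lands in the
// new low word.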

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerReverseBytes(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Rev(OutputRegister(invoke), InputRegisterAt(invoke, 0));
}

void IntrinsicLocationsBuilderARMVIXL::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocationsWithOverlap(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitLongReverseBytes(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  vixl32::Register in_reg_lo = LowRegisterFrom(locations->InAt(0));
  vixl32::Register in_reg_hi = HighRegisterFrom(locations->InAt(0));
  vixl32::Register out_reg_lo = LowRegisterFrom(locations->Out());
  vixl32::Register out_reg_hi = HighRegisterFrom(locations->Out());

  __ Rev(out_reg_lo, in_reg_hi);
  __ Rev(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARMVIXL::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitShortReverseBytes(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  __ Revsh(OutputRegister(invoke), InputRegisterAt(invoke, 0));
}

static void GenBitCount(HInvoke* instr, DataType::Type type, ArmVIXLAssembler* assembler) {
  DCHECK(DataType::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), DataType::Type::kInt32);
  DCHECK_EQ(DataType::Kind(instr->InputAt(0)->GetType()), type);

  bool is_long = type == DataType::Type::kInt64;
  LocationSummary* locations = instr->GetLocations();
  Location in = locations->InAt(0);
  vixl32::Register src_0 = is_long ? LowRegisterFrom(in) : RegisterFrom(in);
  vixl32::Register src_1 = is_long ? HighRegisterFrom(in) : src_0;
  vixl32::SRegister tmp_s = LowSRegisterFrom(locations->GetTemp(0));
  vixl32::DRegister tmp_d = DRegisterFrom(locations->GetTemp(0));
  vixl32::Register  out_r = OutputRegister(instr);

  // Move data from core register(s) to temp D-reg for bit count calculation, then move back.
  // According to Cortex A57 and A72 optimization guides, compared to transferring to a full
  // D-reg, transferring data from a core reg to the upper or lower half of a VFP D-reg incurs
  // extra latency; that's why, for the integer bit count, we use 'vmov d0, r0, r0' instead of
  // 'vmov d0[0], r0'.
  __ Vmov(tmp_d, src_1, src_0);     // Temp DReg |--src_1|--src_0|
  __ Vcnt(Untyped8, tmp_d, tmp_d);  // Temp DReg |c|c|c|c|c|c|c|c|
  __ Vpaddl(U8, tmp_d, tmp_d);      // Temp DReg |--c|--c|--c|--c|
  __ Vpaddl(U16, tmp_d, tmp_d);     // Temp DReg |------c|------c|
  if (is_long) {
    __ Vpaddl(U32, tmp_d, tmp_d);   // Temp DReg |--------------c|
  }
  __ Vmov(out_r, tmp_s);
}
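
// A worked example of the GenBitCount reduction for the int case, with input
// 0x80010003 (popcount 4); both halves of the D register hold that word after
// the initial Vmov, and the low half flows through as:
//   Vcnt  (byte lanes)     : |0x03|0x00|0x01|0x80| -> |2|0|1|1|
//   Vpaddl U8 (half-words) : |2+0|1+1|             -> |2|2|
//   Vpaddl U16 (words)     : |2+2|                 -> |4|
// The final Vmov then copies the 4 from the low S register to the core output.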

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
  invoke->GetLocations()->AddTemp(Location::RequiresFpuRegister());
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, DataType::Type::kInt32, GetAssembler());
}

void IntrinsicLocationsBuilderARMVIXL::VisitLongBitCount(HInvoke* invoke) {
  VisitIntegerBitCount(invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, DataType::Type::kInt64, GetAssembler());
}

static void GenHighestOneBit(HInvoke* invoke,
                             DataType::Type type,
                             CodeGeneratorARMVIXL* codegen) {
  DCHECK(DataType::IsIntOrLongType(type));

  ArmVIXLAssembler* assembler = codegen->GetAssembler();
  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
  const vixl32::Register temp = temps.Acquire();

  if (type == DataType::Type::kInt64) {
    LocationSummary* locations = invoke->GetLocations();
    Location in = locations->InAt(0);
    Location out = locations->Out();

    vixl32::Register in_reg_lo = LowRegisterFrom(in);
    vixl32::Register in_reg_hi = HighRegisterFrom(in);
    vixl32::Register out_reg_lo = LowRegisterFrom(out);
    vixl32::Register out_reg_hi = HighRegisterFrom(out);

    __ Mov(temp, 0x80000000);  // Modified immediate.
    __ Clz(out_reg_lo, in_reg_lo);
    __ Clz(out_reg_hi, in_reg_hi);
    __ Lsr(out_reg_lo, temp, out_reg_lo);
    __ Lsrs(out_reg_hi, temp, out_reg_hi);

    // Discard result for lowest 32 bits if highest 32 bits are not zero.
    // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
    // we check that the output is in a low register, so that a 16-bit MOV
    // encoding can be used. If output is in a high register, then we generate
    // 4 more bytes of code to avoid a branch.
    Operand mov_src(0);
    if (!out_reg_lo.IsLow()) {
      __ Mov(LeaveFlags, temp, 0);
      mov_src = Operand(temp);
    }
    ExactAssemblyScope it_scope(codegen->GetVIXLAssembler(),
                                2 * vixl32::k16BitT32InstructionSizeInBytes,
                                CodeBufferCheckScope::kExactSize);
    __ it(ne);
    __ mov(ne, out_reg_lo, mov_src);
  } else {
    vixl32::Register out = OutputRegister(invoke);
    vixl32::Register in = InputRegisterAt(invoke, 0);

    __ Mov(temp, 0x80000000);  // Modified immediate.
    __ Clz(out, in);
    __ Lsr(out, temp, out);
  }
}
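
// The int path above computes Integer.highestOneBit(x) as
// 0x80000000 >>> numberOfLeadingZeros(x): e.g. x = 0x00400000 has 9 leading
// zeros and 0x80000000 LSR 9 == 0x00400000. For x == 0, Clz returns 32 and an
// LSR by 32 yields 0, so zero needs no special casing.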

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke, DataType::Type::kInt32, codegen_);
}

void IntrinsicLocationsBuilderARMVIXL::VisitLongHighestOneBit(HInvoke* invoke) {
  CreateLongToLongLocationsWithOverlap(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke, DataType::Type::kInt64, codegen_);
}

static void GenLowestOneBit(HInvoke* invoke,
                            DataType::Type type,
                            CodeGeneratorARMVIXL* codegen) {
  DCHECK(DataType::IsIntOrLongType(type));

  ArmVIXLAssembler* assembler = codegen->GetAssembler();
  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
  const vixl32::Register temp = temps.Acquire();

  if (type == DataType::Type::kInt64) {
    LocationSummary* locations = invoke->GetLocations();
    Location in = locations->InAt(0);
    Location out = locations->Out();

    vixl32::Register in_reg_lo = LowRegisterFrom(in);
    vixl32::Register in_reg_hi = HighRegisterFrom(in);
    vixl32::Register out_reg_lo = LowRegisterFrom(out);
    vixl32::Register out_reg_hi = HighRegisterFrom(out);

    __ Rsb(out_reg_hi, in_reg_hi, 0);
    __ Rsb(out_reg_lo, in_reg_lo, 0);
    __ And(out_reg_hi, out_reg_hi, in_reg_hi);
    // The result of this operation is 0 iff in_reg_lo is 0.
    __ Ands(out_reg_lo, out_reg_lo, in_reg_lo);

    // Discard result for highest 32 bits if lowest 32 bits are not zero.
    // Since IT blocks longer than a 16-bit instruction are deprecated by ARMv8,
    // we check that the output is in a low register, so that a 16-bit MOV
    // encoding can be used. If output is in a high register, then we generate
    // 4 more bytes of code to avoid a branch.
    Operand mov_src(0);
    if (!out_reg_lo.IsLow()) {
      __ Mov(LeaveFlags, temp, 0);
      mov_src = Operand(temp);
    }
    ExactAssemblyScope it_scope(codegen->GetVIXLAssembler(),
                                2 * vixl32::k16BitT32InstructionSizeInBytes,
                                CodeBufferCheckScope::kExactSize);
    __ it(ne);
    __ mov(ne, out_reg_hi, mov_src);
  } else {
    vixl32::Register out = OutputRegister(invoke);
    vixl32::Register in = InputRegisterAt(invoke, 0);

    __ Rsb(temp, in, 0);
    __ And(out, temp, in);
  }
}
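
// GenLowestOneBit relies on the x & -x identity: e.g. for x = 0b0110,
// -x = ...11111010 and x & -x = 0b0010, the lowest set bit. In the long case
// the two halves are negated independently; that matches a full 64-bit
// negation whenever the low word is zero, and otherwise the high half of the
// result is discarded by the conditional move anyway.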

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke, DataType::Type::kInt32, codegen_);
}

void IntrinsicLocationsBuilderARMVIXL::VisitLongLowestOneBit(HInvoke* invoke) {
  CreateLongToLongLocationsWithOverlap(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke, DataType::Type::kInt64, codegen_);
}

void IntrinsicLocationsBuilderARMVIXL::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARMVIXL::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  vixl32::Register srcObj = InputRegisterAt(invoke, 0);
  vixl32::Register srcBegin = InputRegisterAt(invoke, 1);
  vixl32::Register srcEnd = InputRegisterAt(invoke, 2);
  vixl32::Register dstObj = InputRegisterAt(invoke, 3);
  vixl32::Register dstBegin = InputRegisterAt(invoke, 4);

  vixl32::Register num_chr = RegisterFrom(locations->GetTemp(0));
  vixl32::Register src_ptr = RegisterFrom(locations->GetTemp(1));
  vixl32::Register dst_ptr = RegisterFrom(locations->GetTemp(2));

  vixl32::Label done, compressed_string_loop;
  vixl32::Label* final_label = codegen_->GetFinalLabel(invoke, &done);
  // dst to be copied.
  __ Add(dst_ptr, dstObj, data_offset);
  __ Add(dst_ptr, dst_ptr, Operand(dstBegin, vixl32::LSL, 1));

  __ Subs(num_chr, srcEnd, srcBegin);
  // Early out for valid zero-length retrievals.
  __ B(eq, final_label, /* far_target */ false);

  // src range to copy.
  __ Add(src_ptr, srcObj, value_offset);

  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
  vixl32::Register temp;
  vixl32::Label compressed_string_preloop;
  if (mirror::kUseStringCompression) {
    // Location of count in string.
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    temp = temps.Acquire();
    // String's length.
    __ Ldr(temp, MemOperand(srcObj, count_offset));
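    // The low bit of the count field is the compression flag; it is clear
    // (StringCompressionFlag::kCompressed == 0) when the characters are
    // stored as 8-bit data.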
    __ Tst(temp, 1);
    temps.Release(temp);
    __ B(eq, &compressed_string_preloop, /* far_target */ false);
  }
  __ Add(src_ptr, src_ptr, Operand(srcBegin, vixl32::LSL, 1));

  // Do the copy.
  vixl32::Label loop, remainder;

  temp = temps.Acquire();
  // Save repairing the value of num_chr on the < 4 character path.
  __ Subs(temp, num_chr, 4);
  __ B(lt, &remainder, /* far_target */ false);

  // Keep the result of the earlier subs, we are going to fetch at least 4 characters.
  __ Mov(num_chr, temp);

  // Main loop used for longer fetches loads and stores 4x16-bit characters at a time.
  // (LDRD/STRD fault on unaligned addresses and it's not worth inlining extra code
  // to rectify these everywhere this intrinsic applies.)
  __ Bind(&loop);
  __ Ldr(temp, MemOperand(src_ptr, char_size * 2));
  __ Subs(num_chr, num_chr, 4);
  __ Str(temp, MemOperand(dst_ptr, char_size * 2));
  __ Ldr(temp, MemOperand(src_ptr, char_size * 4, PostIndex));
  __ Str(temp, MemOperand(dst_ptr, char_size * 4, PostIndex));
  temps.Release(temp);
  __ B(ge, &loop, /* far_target */ false);

  __ Adds(num_chr, num_chr, 4);
  __ B(eq, final_label, /* far_target */ false);

  // Main loop for < 4 character case and remainder handling. Loads and stores one
  // 16-bit Java character at a time.
  __ Bind(&remainder);
  temp = temps.Acquire();
  __ Ldrh(temp, MemOperand(src_ptr, char_size, PostIndex));
  __ Subs(num_chr, num_chr, 1);
  __ Strh(temp, MemOperand(dst_ptr, char_size, PostIndex));
  temps.Release(temp);
  __ B(gt, &remainder, /* far_target */ false);

  if (mirror::kUseStringCompression) {
    __ B(final_label);

    const size_t c_char_size = DataType::Size(DataType::Type::kInt8);
    DCHECK_EQ(c_char_size, 1u);
    // Copy loop for compressed src, copying one 8-bit character to a 16-bit slot at a time.
    __ Bind(&compressed_string_preloop);
    __ Add(src_ptr, src_ptr, srcBegin);
    __ Bind(&compressed_string_loop);
    temp = temps.Acquire();
    __ Ldrb(temp, MemOperand(src_ptr, c_char_size, PostIndex));
    __ Strh(temp, MemOperand(dst_ptr, char_size, PostIndex));
    temps.Release(temp);
    __ Subs(num_chr, num_chr, 1);
    __ B(gt, &compressed_string_loop, /* far_target */ false);
  }

  if (done.IsReferenced()) {
    __ Bind(&done);
  }
}

void IntrinsicLocationsBuilderARMVIXL::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitFloatIsInfinite(HInvoke* invoke) {
  ArmVIXLAssembler* const assembler = GetAssembler();
  const vixl32::Register out = OutputRegister(invoke);
  // Shifting left by 1 bit makes the value encodable as an immediate operand;
  // we don't care about the sign bit anyway.
  constexpr uint32_t infinity = kPositiveInfinityFloat << 1U;

  __ Vmov(out, InputSRegisterAt(invoke, 0));
  // We don't care about the sign bit, so shift left.
  __ Lsl(out, out, 1);
  __ Eor(out, out, infinity);
  codegen_->GenerateConditionWithZero(kCondEQ, out, out);
}
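
// A worked check of the constants above: bits(+Inf) == kPositiveInfinityFloat
// == 0x7f800000, so after the left shift both +Inf (0x7f800000) and -Inf
// (0xff800000) map to 0xff000000, which is encodable as a modified immediate;
// any other input yields a different shifted value and a non-zero Eor result.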

void IntrinsicLocationsBuilderARMVIXL::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorARMVIXL::VisitDoubleIsInfinite(HInvoke* invoke) {
  ArmVIXLAssembler* const assembler = GetAssembler();
  const vixl32::Register out = OutputRegister(invoke);
  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
  const vixl32::Register temp = temps.Acquire();
  // The highest 32 bits of double precision positive infinity separated into
  // two constants encodable as immediate operands.
  constexpr uint32_t infinity_high = 0x7f000000U;
  constexpr uint32_t infinity_high2 = 0x00f00000U;

  static_assert((infinity_high | infinity_high2) ==
                    static_cast<uint32_t>(kPositiveInfinityDouble >> 32U),
                "The constants do not add up to the high 32 bits of double "
                "precision positive infinity.");
  __ Vmov(temp, out, InputDRegisterAt(invoke, 0));
  __ Eor(out, out, infinity_high);
  __ Eor(out, out, infinity_high2);
  // We don't care about the sign bit, so shift left.
  __ Orr(out, temp, Operand(out, vixl32::LSL, 1));
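  // `out` is now zero iff the low word is zero and the high word, ignoring
  // its sign bit, equals 0x7ff00000 (the high word of positive infinity),
  // i.e. iff the input is +/- infinity.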
  codegen_->GenerateConditionWithZero(kCondEQ, out, out);
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathCeil(HInvoke* invoke) {
  if (features_.HasARMv8AInstructions()) {
    CreateFPToFPLocations(allocator_, invoke);
  }
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathCeil(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  DCHECK(codegen_->GetInstructionSetFeatures().HasARMv8AInstructions());
  __ Vrintp(F64, F64, OutputDRegister(invoke), InputDRegisterAt(invoke, 0));
}

void IntrinsicLocationsBuilderARMVIXL::VisitMathFloor(HInvoke* invoke) {
  if (features_.HasARMv8AInstructions()) {
    CreateFPToFPLocations(allocator_, invoke);
  }
}

void IntrinsicCodeGeneratorARMVIXL::VisitMathFloor(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  DCHECK(codegen_->GetInstructionSetFeatures().HasARMv8AInstructions());
  __ Vrintm(F64, F64, OutputDRegister(invoke), InputDRegisterAt(invoke, 0));
}

void IntrinsicLocationsBuilderARMVIXL::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConventionARMVIXL calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      LocationFrom(r0),
      LocationFrom(calling_convention.GetRegisterAt(0)));
}

void IntrinsicCodeGeneratorARMVIXL::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info =
      IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions());
  LocationSummary* locations = invoke->GetLocations();
  ArmVIXLAssembler* const assembler = GetAssembler();

  vixl32::Register out = RegisterFrom(locations->Out());
  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (static_cast<uint32_t>(value - info.low) < info.length) {
      // Just embed the j.l.Integer in the code.
      DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference);
      codegen_->LoadBootImageAddress(out, info.value_boot_image_reference);
    } else {
      DCHECK(locations->CanCall());
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
                                             info.integer_boot_image_offset);
      __ Mov(temp, value);
      assembler->StoreToOffset(kStoreWord, temp, out, info.value_offset);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    DCHECK(locations->CanCall());
    vixl32::Register in = RegisterFrom(locations->InAt(0));
    // Check bounds of our cache.
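    // A single unsigned comparison covers both bounds: after subtracting
    // info.low, in-range values land in [0, info.length), while values below
    // info.low wrap around to large unsigned numbers and also take the `hs`
    // branch to the allocation path.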
    __ Add(out, in, -info.low);
    __ Cmp(out, info.length);
    vixl32::Label allocate, done;
    __ B(hs, &allocate, /* is_far_target */ false);
    // If the value is within the bounds, load the j.l.Integer directly from the array.
    codegen_->LoadBootImageAddress(temp, info.array_data_boot_image_reference);
    codegen_->LoadFromShiftedRegOffset(DataType::Type::kReference, locations->Out(), temp, out);
    assembler->MaybeUnpoisonHeapReference(out);
    __ B(&done);
    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
                                           info.integer_boot_image_offset);
    assembler->StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}

void IntrinsicLocationsBuilderARMVIXL::VisitThreadInterrupted(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARMVIXL::VisitThreadInterrupted(HInvoke* invoke) {
  ArmVIXLAssembler* assembler = GetAssembler();
  vixl32::Register out = RegisterFrom(invoke->GetLocations()->Out());
  int32_t offset = Thread::InterruptedOffset<kArmPointerSize>().Int32Value();
  __ Ldr(out, MemOperand(tr, offset));
  UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
  vixl32::Register temp = temps.Acquire();
  vixl32::Label done;
  vixl32::Label* const final_label = codegen_->GetFinalLabel(invoke, &done);
  __ CompareAndBranchIfZero(out, final_label, /* far_target */ false);
  __ Dmb(vixl32::ISH);
  __ Mov(temp, 0);
  assembler->StoreToOffset(kStoreWord, temp, tr, offset);
  __ Dmb(vixl32::ISH);
  if (done.IsReferenced()) {
    __ Bind(&done);
  }
}

void IntrinsicLocationsBuilderARMVIXL::VisitReachabilityFence(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::Any());
}

void IntrinsicCodeGeneratorARMVIXL::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }

UNIMPLEMENTED_INTRINSIC(ARMVIXL, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARMVIXL, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, ReferenceGetReferent)

UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBuilderAppend)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBuilderToString)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARMVIXL)

#undef __

}  // namespace arm
}  // namespace art