ARM/ARM64: Implement numberOfLeadingZeros intrinsic.
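
Add Intrinsics::kIntegerNumberOfLeadingZeros and Intrinsics::kLongNumberOfLeadingZeros,
implement them with CLZ in the ARM and ARM64 backends, and list them as
unimplemented on x86 and x86-64 so those targets keep the regular invoke path.

Both Java methods return the number of zero bits above the highest-order one
bit of the argument, and the full bit width (32 or 64) for a zero argument.
A portable C++ sketch of the expected semantics, for reference only (not part
of the patch):

    #include <cstdint>

    int NumberOfLeadingZeros32(uint32_t x) {
      if (x == 0) return 32;            // Java defines nlz(0) == 32.
      int n = 0;
      while ((x & 0x80000000u) == 0) {  // Shift until the top bit is set.
        x <<= 1;
        ++n;
      }
      return n;
    }

CLZ already returns the register width for a zero input on both AArch32 and
AArch64, matching these semantics, so the generated code needs no explicit
zero check.
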
Change-Id: I4042fb7a0b75140475dcfca23e8f79d310f5333b
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index bc7da80..55e964e 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -103,6 +103,16 @@
LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
UNREACHABLE();
}
+ case kIntrinsicNumberOfLeadingZeros:
+ switch (GetType(method.d.data, true)) {
+ case Primitive::kPrimInt:
+ return Intrinsics::kIntegerNumberOfLeadingZeros;
+ case Primitive::kPrimLong:
+ return Intrinsics::kLongNumberOfLeadingZeros;
+ default:
+ LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
+ UNREACHABLE();
+ }
// Abs.
case kIntrinsicAbsDouble:
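
The new recognizer case reuses the surrounding dispatch pattern: method.d.data
carries an op size, and GetType(data, /* is_op_size */ true) maps it to a
primitive type. A simplified sketch of that mapping, assuming the data field
holds an OpSize value (hypothetical helper name; the real GetType in this file
handles more cases):

    // Hypothetical simplification of GetType(data, /* is_op_size */ true).
    static Primitive::Type TypeFromOpSize(uint64_t data) {
      switch (static_cast<OpSize>(data)) {
        case k32: return Primitive::kPrimInt;   // Integer.numberOfLeadingZeros
        case k64: return Primitive::kPrimLong;  // Long.numberOfLeadingZeros
        default:  return Primitive::kPrimVoid;  // Hits the LOG(FATAL) above.
      }
    }
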
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index b4dbf75..a797654 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -224,6 +224,48 @@
locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}
+static void GenNumberOfLeadingZeros(LocationSummary* locations,
+ Primitive::Type type,
+ ArmAssembler* assembler) {
+ Location in = locations->InAt(0);
+ Register out = locations->Out().AsRegister<Register>();
+
+ DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));
+
+ if (type == Primitive::kPrimLong) {
+ Register in_reg_lo = in.AsRegisterPairLow<Register>();
+ Register in_reg_hi = in.AsRegisterPairHigh<Register>();
+ Label end;
+ __ clz(out, in_reg_hi);
+ __ CompareAndBranchIfNonZero(in_reg_hi, &end);
+ __ clz(out, in_reg_lo);
+ __ AddConstant(out, 32);
+ __ Bind(&end);
+ } else {
+ __ clz(out, in.AsRegister<Register>());
+ }
+}
+
+void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
+ CreateIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
+ GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
+}
+
+void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
+ LocationSummary* locations = new (arena_) LocationSummary(invoke,
+ LocationSummary::kNoCall,
+ kIntrinsified);
+ locations->SetInAt(0, Location::RequiresRegister());
+ locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
+}
+
+void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
+ GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
+}
+
static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
Location in = locations->InAt(0);
Location out = locations->Out();
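
On AArch32 a long lives in a register pair, so the long path needs two CLZs:
count the leading zeros of the high word, and only if the high word is
entirely zero, fall through to the low word and add 32. This is also why the
locations builder requests Location::kOutputOverlap: out is written before
in_reg_lo is read, so it must not alias the input pair. The emitted
clz/cbnz/clz/add sequence, mirrored in portable C++ (reusing the
NumberOfLeadingZeros32 sketch above; illustration only):

    int NumberOfLeadingZeros64(uint32_t lo, uint32_t hi) {
      int out = NumberOfLeadingZeros32(hi);  // clz out, in_reg_hi
      if (hi != 0) return out;               // CompareAndBranchIfNonZero -> end
      out = NumberOfLeadingZeros32(lo);      // clz out, in_reg_lo
      return out + 32;                       // AddConstant(out, 32)
    }
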
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 78ac167..2c93fea 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -260,6 +260,33 @@
GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}
+static void GenNumberOfLeadingZeros(LocationSummary* locations,
+ Primitive::Type type,
+ vixl::MacroAssembler* masm) {
+ DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);
+
+ Location in = locations->InAt(0);
+ Location out = locations->Out();
+
+ __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
+}
+
+void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
+ CreateIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
+ GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
+}
+
+void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
+ CreateIntToIntLocations(arena_, invoke);
+}
+
+void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
+ GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
+}
+
static void GenReverse(LocationSummary* locations,
Primitive::Type type,
vixl::MacroAssembler* masm) {
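
On ARM64 both variants compile to a single Clz: RegisterFrom(in, type) picks
the W register for kPrimInt and the X register for kPrimLong, and CLZ of a
zero register yields the register width. A quick sanity check of the edge
cases against the sketches above (illustration only):

    #include <cassert>

    void CheckLeadingZerosEdgeCases() {
      assert(NumberOfLeadingZeros32(0u) == 32);
      assert(NumberOfLeadingZeros32(1u) == 31);
      assert(NumberOfLeadingZeros64(0u, 0u) == 64);  // long value 0
      assert(NumberOfLeadingZeros64(1u, 0u) == 63);  // long value 1
      assert(NumberOfLeadingZeros64(0u, 1u) == 31);  // long value 1 << 32
    }
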
diff --git a/compiler/optimizing/intrinsics_list.h b/compiler/optimizing/intrinsics_list.h
index 2c9248f..d28c5a3 100644
--- a/compiler/optimizing/intrinsics_list.h
+++ b/compiler/optimizing/intrinsics_list.h
@@ -27,8 +27,10 @@
V(FloatIntBitsToFloat, kStatic) \
V(IntegerReverse, kStatic) \
V(IntegerReverseBytes, kStatic) \
+ V(IntegerNumberOfLeadingZeros, kStatic) \
V(LongReverse, kStatic) \
V(LongReverseBytes, kStatic) \
+ V(LongNumberOfLeadingZeros, kStatic) \
V(ShortReverseBytes, kStatic) \
V(MathAbsDouble, kStatic) \
V(MathAbsFloat, kStatic) \
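
intrinsics_list.h is an X-macro list: each V(Name, IsStatic) entry is expanded
wherever a per-intrinsic declaration is needed, so adding the two entries here
is what brings the Intrinsics::k...NumberOfLeadingZeros enum values and the
Visit methods used above into existence. A minimal sketch of the expansion
pattern, modeled on how intrinsics.h consumes the list (details may differ):

    // Simplified sketch of one X-macro expansion site.
    enum class Intrinsics {
      kNone,
    #define OPTIMIZING_INTRINSICS(Name, IsStatic) k ## Name,
      INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
    #undef OPTIMIZING_INTRINSICS
    };

The visitor base classes declare one Visit ## Name per entry the same way,
which is why a new entry forces every backend to either implement or stub it.
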
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 0d6ca09..993c005 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -1756,6 +1756,8 @@
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+UNIMPLEMENTED_INTRINSIC(IntegerNumberOfLeadingZeros)
+UNIMPLEMENTED_INTRINSIC(LongNumberOfLeadingZeros)
#undef UNIMPLEMENTED_INTRINSIC
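
On x86 and x86-64 the new intrinsics are only declared unimplemented:
UNIMPLEMENTED_INTRINSIC(Name) expands to empty locations-builder and
code-generator visitors, so the recognizer leaves the call as a regular invoke
of the Java method. Roughly (sketch modeled on the macro's definition in these
files; the exact attributes may differ):

    #define UNIMPLEMENTED_INTRINSIC(Name)                                      \
    void IntrinsicLocationsBuilderX86::Visit ## Name(                          \
        HInvoke* invoke ATTRIBUTE_UNUSED) {                                    \
      // No locations set: the invoke keeps its normal call codegen.           \
    }                                                                          \
    void IntrinsicCodeGeneratorX86::Visit ## Name(                             \
        HInvoke* invoke ATTRIBUTE_UNUSED) {                                    \
    }
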
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index ea342e9..8ab0b77 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1615,6 +1615,8 @@
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+UNIMPLEMENTED_INTRINSIC(IntegerNumberOfLeadingZeros)
+UNIMPLEMENTED_INTRINSIC(LongNumberOfLeadingZeros)
#undef UNIMPLEMENTED_INTRINSIC