Change hash table load factors

Changed the class table and intern table to query the runtime for their
load factors. The runtime returns load factors based on whether the
device is a low-RAM device.

DescriptorEquals time during class linking drops from 10% to 1.2% when
compiling GmsCore with interpret-only.

Added test.

Bug: 24917584

Change-Id: Iaaf5d2eab1b0c2d188d299e4bc1852cdb3801173
diff --git a/runtime/base/hash_set_test.cc b/runtime/base/hash_set_test.cc
index 6d2c5e0..743e98e 100644
--- a/runtime/base/hash_set_test.cc
+++ b/runtime/base/hash_set_test.cc
@@ -196,6 +196,24 @@
   }
 }
 
+TEST_F(HashSetTest, TestLoadFactor) {
+  HashSet<std::string, IsEmptyFnString> hash_set;
+  static constexpr size_t kStringCount = 1000;
+  static constexpr double kEpsilon = 0.01;
+  for (size_t i = 0; i < kStringCount; ++i) {
+    hash_set.Insert(RandomString(i % 10 + 1));
+  }
+  // Check that changing the load factor resizes the table to be within the target range.
+  EXPECT_GE(hash_set.CalculateLoadFactor() + kEpsilon, hash_set.GetMinLoadFactor());
+  EXPECT_LE(hash_set.CalculateLoadFactor() - kEpsilon, hash_set.GetMaxLoadFactor());
+  hash_set.SetLoadFactor(0.1, 0.3);
+  EXPECT_DOUBLE_EQ(0.1, hash_set.GetMinLoadFactor());
+  EXPECT_DOUBLE_EQ(0.3, hash_set.GetMaxLoadFactor());
+  EXPECT_LE(hash_set.CalculateLoadFactor() - kEpsilon, hash_set.GetMaxLoadFactor());
+  hash_set.SetLoadFactor(0.6, 0.8);
+  EXPECT_LE(hash_set.CalculateLoadFactor() - kEpsilon, hash_set.GetMaxLoadFactor());
+}
+
 TEST_F(HashSetTest, TestStress) {
   HashSet<std::string, IsEmptyFnString> hash_set;
   std::unordered_multiset<std::string> std_set;