Pass the class loader context to dex2oat when optimizing at runtime

Until now we always passed the special shared library symbol "&" when we
called dex2oat at runtime without an explicit class path.

This CL changes that and instead passes the class loader context inferred
from the runtime class loaders to dex2oat. If any of the runtime class
loaders is not supported, we fall back to passing the special shared
library symbol.
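
For reference, a minimal sketch of the intended fallback logic (names and
the call site are illustrative only; it assumes the existing
ClassLoaderContext::CreateContextForClassLoader() helper, the new
EncodeContextForDex2oat() method added by this CL, and the
OatFile::kSpecialSharedLibrary constant, i.e. "&"):

  // Sketch only, not the exact implementation.
  std::string EncodeClassPathForDex2oat(jobject class_loader,
                                        jobjectArray dex_elements) {
    std::unique_ptr<ClassLoaderContext> context =
        ClassLoaderContext::CreateContextForClassLoader(class_loader, dex_elements);
    if (context == nullptr) {
      // At least one runtime class loader is not supported: keep the old
      // behaviour and pass the special shared library symbol "&".
      return OatFile::kSpecialSharedLibrary;
    }
    // Encode the inferred context (e.g. "PCL[dex1.jar:dex2.jar]") so that
    // dex2oat can record it in the generated oat file.
    return context->EncodeContextForDex2oat(/* base_dir */ "");
  }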

Bug: 38138251
Test: m test-art-host
Change-Id: Ica43ee8a3f36dab2d9ed0e634a9f6341379c8e1c
diff --git a/runtime/class_loader_context_test.cc b/runtime/class_loader_context_test.cc
index 2b85188..d4688c1 100644
--- a/runtime/class_loader_context_test.cc
+++ b/runtime/class_loader_context_test.cc
@@ -455,6 +455,20 @@
   ASSERT_EQ(expected_encoding, context->EncodeContextForOatFile(""));
 }
 
+TEST_F(ClassLoaderContextTest, EncodeForDex2oat) {
+  std::string dex1_name = GetTestDexFileName("Main");
+  std::string dex2_name = GetTestDexFileName("MultiDex");
+  std::unique_ptr<ClassLoaderContext> context =
+      ClassLoaderContext::Create("PCL[" + dex1_name + ":" + dex2_name + "]");
+  ASSERT_TRUE(context->OpenDexFiles(InstructionSet::kArm, ""));
+
+  std::vector<std::unique_ptr<const DexFile>> dex1 = OpenTestDexFiles("Main");
+  std::vector<std::unique_ptr<const DexFile>> dex2 = OpenTestDexFiles("MultiDex");
+  std::string encoding = context->EncodeContextForDex2oat("");
+  std::string expected_encoding = "PCL[" + dex1_name + ":" + dex2_name + "]";
+  ASSERT_EQ(expected_encoding, encoding);
+}
+
 // TODO(calin) add a test which creates the context for a class loader together with dex_elements.
 TEST_F(ClassLoaderContextTest, CreateContextForClassLoader) {
   // The chain is