Merge "Resolve module bitness for target-host required modules"
diff --git a/core/Makefile b/core/Makefile
index dd5c2cc..1ba176a 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -206,7 +206,7 @@
 
 define copy-and-strip-kernel-module
 $(2): $(1)
-	$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) -o $(2) --strip-debug $(1)
+	$(LLVM_STRIP) -o $(2) --strip-debug $(1)
 endef
 
 # $(1): modules list
@@ -340,6 +340,15 @@
     $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_ramdisk_recovery_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_RAMDISK_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_RAMDISK_RECOVERY_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.recovery,$(TARGET_VENDOR_RAMDISK_OUT))))
 endef
 
+# $(1): kernel module directory name (top is an out of band value for no directory)
+define build-vendor-charger-load
+$(if $(filter top,$(1)),\
+  $(eval _kver :=)$(eval _sep :=),\
+  $(eval _kver := $(1))$(eval _sep :=_))\
+  $(if $(BOARD_VENDOR_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),\
+    $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_charger_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_CHARGER_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.charger,$(TARGET_OUT_VENDOR))))
+endef
+
 ifneq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
   # If there is no vendor boot partition, store vendor ramdisk kernel modules in the
   # boot ramdisk.
@@ -375,6 +384,7 @@
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR_RAMDISK,$(TARGET_VENDOR_RAMDISK_OUT),,modules.load,$(VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(dir))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-recovery-load,$(dir))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(TARGET_OUT_VENDOR),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(dir))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(dir))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(TARGET_OUT_ODM),odm,modules.load,,$(dir))) \
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(dir))),\
@@ -3641,6 +3651,7 @@
   libconscrypt_openjdk_jni \
   lpmake \
   lpunpack \
+  lz4 \
   make_f2fs \
   merge_target_files \
   minigzip \
@@ -4040,13 +4051,13 @@
   $(foreach device,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES), \
     echo "super_$(device)_device_size=$(BOARD_SUPER_PARTITION_$(call to-upper,$(device))_DEVICE_SIZE)" >> $(1);)
   $(if $(BOARD_SUPER_PARTITION_PARTITION_LIST), \
-    echo "dynamic_partition_list=$(call filter-out-missing-vendor, $(BOARD_SUPER_PARTITION_PARTITION_LIST))" >> $(1))
+    echo "dynamic_partition_list=$(call filter-out-missing-vendor,$(BOARD_SUPER_PARTITION_PARTITION_LIST))" >> $(1))
   $(if $(BOARD_SUPER_PARTITION_GROUPS),
     echo "super_partition_groups=$(BOARD_SUPER_PARTITION_GROUPS)" >> $(1))
   $(foreach group,$(BOARD_SUPER_PARTITION_GROUPS), \
     echo "super_$(group)_group_size=$(BOARD_$(call to-upper,$(group))_SIZE)" >> $(1); \
     $(if $(BOARD_$(call to-upper,$(group))_PARTITION_LIST), \
-      echo "super_$(group)_partition_list=$(call filter-out-missing-vendor, $(BOARD_$(call to-upper,$(group))_PARTITION_LIST))" >> $(1);))
+      echo "super_$(group)_partition_list=$(call filter-out-missing-vendor,$(BOARD_$(call to-upper,$(group))_PARTITION_LIST))" >> $(1);))
   $(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)), \
     echo "build_non_sparse_super_partition=true" >> $(1))
   $(if $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)), \
diff --git a/core/OWNERS b/core/OWNERS
index 750f1fa..459683e 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,3 +1,2 @@
 per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,dbrazdil@google.com
-per-file construct_context.sh = ngeoffray@google.com,calin@google.com,mathieuc@google.com
 per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,mathieuc@google.com
diff --git a/core/config.mk b/core/config.mk
index 2ec0cfd..a5b8ef7 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -150,6 +150,7 @@
   ,Project include variables have been removed)
 $(KATI_obsolete_var TARGET_PREFER_32_BIT TARGET_PREFER_32_BIT_APPS TARGET_PREFER_32_BIT_EXECUTABLES)
 $(KATI_obsolete_var PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST.)
+$(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST.)
 
 # Used to force goals to build.  Only use for conditionally defined goals.
 .PHONY: FORCE
@@ -977,8 +978,7 @@
     BOARD_SUPER_PARTITION_GROUPS and BOARD_*_PARTITION_LIST)
 endif
 BOARD_SUPER_PARTITION_PARTITION_LIST := \
-    $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
-        $(BOARD_$(group)_PARTITION_LIST))
+    $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)),$(BOARD_$(group)_PARTITION_LIST))
 .KATI_READONLY := BOARD_SUPER_PARTITION_PARTITION_LIST
 
 ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
diff --git a/core/construct_context.sh b/core/construct_context.sh
deleted file mode 100755
index d620d08..0000000
--- a/core/construct_context.sh
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/bin/bash
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-# target_sdk_version: parsed from manifest
-#
-# outputs
-# class_loader_context_arg: final class loader conext arg
-# stored_class_loader_context_arg: final stored class loader context arg
-
-if [ -z "${target_sdk_version}" ]; then
-    echo "ERROR: target_sdk_version not set"
-    exit 2
-fi
-
-# The hidl.manager shared library has a dependency on hidl.base. We'll manually
-# add that information to the class loader context if we see those libraries.
-hidl_manager="android.hidl.manager-V1.0-java"
-hidl_base="android.hidl.base-V1.0-java"
-
-function add_to_contexts {
-  for i in $1; do
-    if [[ -z "${class_loader_context}" ]]; then
-      export class_loader_context="PCL[$i]"
-    else
-      export class_loader_context+="#PCL[$i]"
-    fi
-    if [[ $i == *"$hidl_manager"* ]]; then
-      export class_loader_context+="{PCL[${i/$hidl_manager/$hidl_base}]}"
-    fi
-  done
-
-  for i in $2; do
-    if [[ -z "${stored_class_loader_context}" ]]; then
-      export stored_class_loader_context="PCL[$i]"
-    else
-      export stored_class_loader_context+="#PCL[$i]"
-    fi
-    if [[ $i == *"$hidl_manager"* ]]; then
-      export stored_class_loader_context+="{PCL[${i/$hidl_manager/$hidl_base}]}"
-    fi
-  done
-}
-
-# The order below must match what the package manager also computes for
-# class loader context.
-
-if [[ "${target_sdk_version}" -lt "28" ]]; then
-  add_to_contexts "${conditional_host_libs_28}" "${conditional_target_libs_28}"
-fi
-
-if [[ "${target_sdk_version}" -lt "29" ]]; then
-  add_to_contexts "${conditional_host_libs_29}" "${conditional_target_libs_29}"
-fi
-
-if [[ "${target_sdk_version}" -lt "30" ]]; then
-  add_to_contexts "${conditional_host_libs_30}" "${conditional_target_libs_30}"
-fi
-
-add_to_contexts "${dex_preopt_host_libraries}" "${dex_preopt_target_libraries}"
-
-# Generate the actual context string.
-export class_loader_context_arg="--class-loader-context=PCL[]{${class_loader_context}}"
-export stored_class_loader_context_arg="--stored-class-loader-context=PCL[]{${stored_class_loader_context}}"
diff --git a/core/line_coverage.mk b/core/line_coverage.mk
index b920e28..babcb30 100644
--- a/core/line_coverage.mk
+++ b/core/line_coverage.mk
@@ -8,8 +8,7 @@
 # packs them into another zip file called `line_coverage_profiles.zip`.
 #
 # To run the make target set the coverage related envvars first:
-# 	NATIVE_LINE_COVERAGE=true NATIVE_COVERAGE=true \
-#	NATIVE_COVERAGE_PATHS=* make haiku-line-coverage
+# 	NATIVE_COVERAGE=true NATIVE_COVERAGE_PATHS=* make haiku-line-coverage
 # -----------------------------------------------------------------
 
 # TODO(b/148306195): Due this issue some fuzz targets cannot be built with
@@ -47,7 +46,6 @@
 	libinputflinger \
 	libopus \
 	libstagefright \
-	libunwind \
 	libvixl:com.android.art.debug
 
 # Use the intermediates directory to avoid installing libraries to the device.
@@ -68,7 +66,7 @@
 fuzz_target_inputs := $(foreach fuzz,$(fuzz_targets), \
 	$(call intermediates-dir-for,EXECUTABLES,$(fuzz))/$(fuzz))
 
-# When line coverage is enabled (NATIVE_LINE_COVERAGE is set), make creates
+# When coverage is enabled (NATIVE_COVERAGE is set), make creates
 # a "coverage" directory and stores all profile (*.gcno) files in inside.
 # We need everything that is stored inside this directory.
 $(line_coverage_profiles): $(fuzz_target_inputs)
diff --git a/core/main.mk b/core/main.mk
index 0045f7e..cc7cf72 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -102,6 +102,15 @@
 EMMA_INSTRUMENT := true
 endif
 
+# TODO(b/158212027): Turn this into an error when all users have been moved to
+# `NATIVE_COVERAGE_PATHS` and `NATIVE_COVERAGE_EXCLUDE_PATHS`.
+ifneq ($(COVERAGE_PATHS),)
+  $(warning Variable COVERAGE_PATHS is deprecated. Please use NATIVE_COVERAGE_PATHS instead.)
+endif
+ifneq ($(COVERAGE_EXCLUDE_PATHS),)
+  $(warning Variable COVERAGE_EXCLUDE_PATHS is deprecated. Please use NATIVE_COVERAGE_EXCLUDE_PATHS instead.)
+endif
+
 ifeq (true,$(EMMA_INSTRUMENT))
 # Adding the jacoco library can cause the inclusion of
 # some typically banned classes
@@ -1316,8 +1325,7 @@
     $(eval extra_files := $(filter-out $(files) $(HOST_OUT)/%,$(product_target_FILES))) \
     $(eval files_in_requirement := $(filter $(path_patterns),$(extra_files))) \
     $(eval all_offending_files += $(files_in_requirement)) \
-    $(eval allowed := $(strip $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST) \
-      $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST))) \
+    $(eval allowed := $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST)) \
     $(eval allowed_patterns := $(call resolve-product-relative-paths,$(allowed))) \
     $(eval offending_files := $(filter-out $(allowed_patterns),$(files_in_requirement))) \
     $(eval enforcement := $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS)) \
diff --git a/core/product.mk b/core/product.mk
index 0eee2ab..f531319 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -328,7 +328,6 @@
 _product_single_value_vars += PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT
 _product_list_vars += PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST
 _product_list_vars += PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT
-_product_list_vars += PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST
 _product_list_vars += PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST
 
 # List of modules that should be forcefully unmarked from being LOCAL_PRODUCT_MODULE, and hence
diff --git a/core/soong_android_app_set.mk b/core/soong_android_app_set.mk
index 5ed9b2c..eb8e5e7 100644
--- a/core/soong_android_app_set.mk
+++ b/core/soong_android_app_set.mk
@@ -31,4 +31,17 @@
 $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
 PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
 
+# android_app_set modules are always presigned
+PACKAGES.$(LOCAL_MODULE).CERTIFICATE := PRESIGNED
+PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
+
+ifneq ($(LOCAL_MODULE_STEM),)
+  PACKAGES.$(LOCAL_MODULE).STEM := $(LOCAL_MODULE_STEM)
+else
+  PACKAGES.$(LOCAL_MODULE).STEM := $(LOCAL_MODULE)
+endif
+
+# Set the actual_partition_tag (calculated in base_rules.mk) for the package.
+PACKAGES.$(LOCAL_MODULE).PARTITION := $(actual_partition_tag)
+
 SOONG_ALREADY_CONV += $(LOCAL_MODULE)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 84bfd1e..b9e7a27 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -114,8 +114,7 @@
 $(call add_json_list, JavaCoveragePaths,                 $(JAVA_COVERAGE_PATHS))
 $(call add_json_list, JavaCoverageExcludePaths,          $(JAVA_COVERAGE_EXCLUDE_PATHS))
 
-$(call add_json_bool, NativeLineCoverage,                $(filter true,$(NATIVE_LINE_COVERAGE)))
-$(call add_json_bool, Native_coverage,                   $(filter true,$(NATIVE_COVERAGE)))
+$(call add_json_bool, GcovCoverage,                      $(filter true,$(NATIVE_COVERAGE)))
 $(call add_json_bool, ClangCoverage,                     $(filter true,$(CLANG_COVERAGE)))
 # TODO(b/158212027): Remove `$(COVERAGE_PATHS)` from this list when all users have been moved to
 # `NATIVE_COVERAGE_PATHS`.
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index cb84a5c..a820a28 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -14,12 +14,10 @@
 
 .PHONY: general-tests
 
-# TODO(b/149249068): Remove vts10-tradefed.jar after all VTS tests are converted
 general_tests_tools := \
     $(HOST_OUT_JAVA_LIBRARIES)/cts-tradefed.jar \
     $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
-    $(HOST_OUT_JAVA_LIBRARIES)/vts-tradefed.jar \
-    $(HOST_OUT_JAVA_LIBRARIES)/vts10-tradefed.jar
+    $(HOST_OUT_JAVA_LIBRARIES)/vts-tradefed.jar
 
 intermediates_dir := $(call intermediates-dir-for,PACKAGING,general-tests)
 general_tests_zip := $(PRODUCT_OUT)/general-tests.zip
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 8a59a39..8bbc35e 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -17,6 +17,7 @@
 import base64
 import collections
 import copy
+import datetime
 import errno
 import fnmatch
 import getopt
@@ -53,16 +54,17 @@
     # running this function, user-supplied search path (`--path`) hasn't been
     # available. So the value set here is the default, which might be overridden
     # by commandline flag later.
-    exec_path = sys.argv[0]
+    exec_path = os.path.realpath(sys.argv[0])
     if exec_path.endswith('.py'):
       script_name = os.path.basename(exec_path)
       # logger hasn't been initialized yet at this point. Use print to output
       # warnings.
       print(
           'Warning: releasetools script should be invoked as hermetic Python '
-          'executable -- build and run `{}` directly.'.format(script_name[:-3]),
+          'executable -- build and run `{}` directly.'.format(
+              script_name[:-3]),
           file=sys.stderr)
-    self.search_path = os.path.realpath(os.path.join(os.path.dirname(exec_path), '..'))
+    self.search_path = os.path.dirname(os.path.dirname(exec_path))
 
     self.signapk_path = "framework/signapk.jar"  # Relative to search_path
     self.signapk_shared_library_path = "lib64"   # Relative to search_path
@@ -191,11 +193,11 @@
     if OPTIONS.logfile:
       config = copy.deepcopy(config)
       config['handlers']['logfile'] = {
-        'class': 'logging.FileHandler',
-        'formatter': 'standard',
-        'level': 'INFO',
-        'mode': 'w',
-        'filename': OPTIONS.logfile,
+          'class': 'logging.FileHandler',
+          'formatter': 'standard',
+          'level': 'INFO',
+          'mode': 'w',
+          'filename': OPTIONS.logfile,
       }
       config['loggers']['']['handlers'].append('logfile')
 
@@ -224,7 +226,7 @@
   if 'universal_newlines' not in kwargs:
     kwargs['universal_newlines'] = True
   # Don't log any if caller explicitly says so.
-  if verbose != False:
+  if verbose:
     logger.info("  Running: \"%s\"", " ".join(args))
   return subprocess.Popen(args, **kwargs)
 
@@ -274,7 +276,7 @@
   if output is None:
     output = ""
   # Don't log any if caller explicitly says so.
-  if verbose != False:
+  if verbose:
     logger.info("%s", output.rstrip())
   if proc.returncode != 0:
     raise ExternalError(
@@ -375,7 +377,6 @@
             'Invalid build fingerprint: "{}". See the requirement in Android CDD '
             "3.2.2. Build Parameters.".format(fingerprint))
 
-
     self._partition_fingerprints = {}
     for partition in PARTITIONS_WITH_CARE_MAP:
       try:
@@ -522,7 +523,8 @@
           self.GetPartitionBuildProp("ro.product.device", partition),
           self.GetPartitionBuildProp("ro.build.version.release", partition),
           self.GetPartitionBuildProp("ro.build.id", partition),
-          self.GetPartitionBuildProp("ro.build.version.incremental", partition),
+          self.GetPartitionBuildProp(
+              "ro.build.version.incremental", partition),
           self.GetPartitionBuildProp("ro.build.type", partition),
           self.GetPartitionBuildProp("ro.build.tags", partition))
 
@@ -683,7 +685,7 @@
   if "boot_images" in d:
     boot_images = d["boot_images"]
   for b in boot_images.split():
-    makeint(b.replace(".img","_size"))
+    makeint(b.replace(".img", "_size"))
 
   # Load recovery fstab if applicable.
   d["fstab"] = _FindAndLoadRecoveryFstab(d, input_file, read_helper)
@@ -703,7 +705,7 @@
     for partition in PARTITIONS_WITH_CARE_MAP:
       fingerprint = build_info.GetPartitionFingerprint(partition)
       if fingerprint:
-        d["avb_{}_salt".format(partition)] = sha256(fingerprint).hexdigest()
+        d["avb_{}_salt".format(partition)] = sha256(fingerprint.encode()).hexdigest()
 
   return d
 
@@ -749,6 +751,7 @@
         placeholders in the build.prop file. We expect exactly one value for
         each of the variables.
   """
+
   def __init__(self, input_file, name, placeholder_values=None):
     self.input_file = input_file
     self.partition = name
@@ -808,7 +811,7 @@
     """Parses the build prop in a given import statement."""
 
     tokens = line.split()
-    if tokens[0] != 'import' or (len(tokens) != 2 and len(tokens) != 3) :
+    if tokens[0] != 'import' or (len(tokens) != 2 and len(tokens) != 3):
       raise ValueError('Unrecognized import statement {}'.format(line))
 
     if len(tokens) == 3:
@@ -998,9 +1001,9 @@
 
   # Pick virtual ab related flags from vendor dict, if defined.
   if "virtual_ab" in vendor_dict.keys():
-     merged_dict["virtual_ab"] = vendor_dict["virtual_ab"]
+    merged_dict["virtual_ab"] = vendor_dict["virtual_ab"]
   if "virtual_ab_retrofit" in vendor_dict.keys():
-     merged_dict["virtual_ab_retrofit"] = vendor_dict["virtual_ab_retrofit"]
+    merged_dict["virtual_ab_retrofit"] = vendor_dict["virtual_ab_retrofit"]
   return merged_dict
 
 
@@ -1234,7 +1237,7 @@
     kernel = "kernel"
   else:
     kernel = image_name.replace("boot", "kernel")
-    kernel = kernel.replace(".img","")
+    kernel = kernel.replace(".img", "")
   if not os.access(os.path.join(sourcedir, kernel), os.F_OK):
     return None
 
@@ -1358,7 +1361,7 @@
     if partition_name == "recovery":
       part_size = info_dict["recovery_size"]
     else:
-      part_size = info_dict[image_name.replace(".img","_size")]
+      part_size = info_dict[image_name.replace(".img", "_size")]
     cmd = [avbtool, "add_hash_footer", "--image", img.name,
            "--partition_size", str(part_size), "--partition_name",
            partition_name]
@@ -1511,7 +1514,8 @@
   if info_dict is None:
     info_dict = OPTIONS.info_dict
 
-  data = _BuildVendorBootImage(os.path.join(unpack_dir, tree_subdir), info_dict)
+  data = _BuildVendorBootImage(
+      os.path.join(unpack_dir, tree_subdir), info_dict)
   if data:
     return File(name, data)
   return None
@@ -1520,7 +1524,7 @@
 def Gunzip(in_filename, out_filename):
   """Gunzips the given gzip compressed file to a given output file."""
   with gzip.open(in_filename, "rb") as in_file, \
-       open(out_filename, "wb") as out_file:
+          open(out_filename, "wb") as out_file:
     shutil.copyfileobj(in_file, out_file)
 
 
@@ -1622,8 +1626,7 @@
     if reset_file_map:
       img.ResetFileMap()
     return img
-  else:
-    return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
+  return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
 
 
 def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None):
@@ -1822,10 +1825,9 @@
     # Not a decimal number. Codename?
     if version in codename_to_api_level_map:
       return codename_to_api_level_map[version]
-    else:
-      raise ExternalError(
-          "Unknown minSdkVersion: '{}'. Known codenames: {}".format(
-              version, codename_to_api_level_map))
+    raise ExternalError(
+        "Unknown minSdkVersion: '{}'. Known codenames: {}".format(
+            version, codename_to_api_level_map))
 
 
 def SignFile(input_name, output_name, key, password, min_api_level=None,
@@ -1930,7 +1932,8 @@
     msg = "%s size (%d) is %.2f%% of limit (%d)" % (target, size, pct, limit)
     if pct >= 99.0:
       raise ExternalError(msg)
-    elif pct >= 95.0:
+
+    if pct >= 95.0:
       logger.warning("\n  WARNING: %s\n", msg)
     else:
       logger.info("  %s", msg)
@@ -2040,6 +2043,7 @@
       Put verbose logs to specified file (regardless of --verbose option.)
 """
 
+
 def Usage(docstring):
   print(docstring.rstrip("\n"))
   print(COMMON_DOCSTRING)
@@ -2202,7 +2206,7 @@
 
       current = self.UpdateAndReadFile(current)
 
-  def PromptResult(self, current): # pylint: disable=no-self-use
+  def PromptResult(self, current):  # pylint: disable=no-self-use
     """Prompt the user to enter a value (password) for each key in
     'current' whose value is fales.  Returns a new dict with all the
     values.
@@ -2265,7 +2269,6 @@
 
 def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
              compress_type=None):
-  import datetime
 
   # http://b/18015246
   # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
@@ -2391,6 +2394,7 @@
 
 class DeviceSpecificParams(object):
   module = None
+
   def __init__(self, **kwargs):
     """Keyword arguments to the constructor become attributes of this
     object, which is passed to all functions in the device-specific
@@ -2519,12 +2523,12 @@
 
 
 DIFF_PROGRAM_BY_EXT = {
-    ".gz" : "imgdiff",
-    ".zip" : ["imgdiff", "-z"],
-    ".jar" : ["imgdiff", "-z"],
-    ".apk" : ["imgdiff", "-z"],
-    ".img" : "imgdiff",
-    }
+    ".gz": "imgdiff",
+    ".zip": ["imgdiff", "-z"],
+    ".jar": ["imgdiff", "-z"],
+    ".apk": ["imgdiff", "-z"],
+    ".img": "imgdiff",
+}
 
 
 class Difference(object):
@@ -2563,6 +2567,7 @@
       cmd.append(ptemp.name)
       p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
       err = []
+
       def run():
         _, e = p.communicate()
         if e:
@@ -2591,7 +2596,6 @@
     self.patch = diff
     return self.tf, self.sf, self.patch
 
-
   def GetPatch(self):
     """Returns a tuple of (target_file, source_file, patch_data).
 
@@ -2902,7 +2906,7 @@
                 new_data_name=new_data_name, code=code))
     script.AppendExtra(script.WordWrap(call))
 
-  def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
+  def _HashBlocks(self, source, ranges):  # pylint: disable=no-self-use
     data = source.ReadRangeSet(ranges)
     ctx = sha1()
 
@@ -2911,7 +2915,7 @@
 
     return ctx.hexdigest()
 
-  def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
+  def _HashZeroBlocks(self, num_blocks):  # pylint: disable=no-self-use
     """Return the hash value for all zero blocks."""
     zero_block = '\x00' * 4096
     ctx = sha1()
@@ -2934,6 +2938,7 @@
     "squashfs": "EMMC"
 }
 
+
 def GetTypeAndDevice(mount_point, info, check_no_slot=True):
   """
   Use GetTypeAndDeviceExpr whenever possible. This function is kept for
@@ -2944,11 +2949,10 @@
   if fstab:
     if check_no_slot:
       assert not fstab[mount_point].slotselect, \
-             "Use GetTypeAndDeviceExpr instead"
+          "Use GetTypeAndDeviceExpr instead"
     return (PARTITION_TYPES[fstab[mount_point].fs_type],
             fstab[mount_point].device)
-  else:
-    raise KeyError
+  raise KeyError
 
 
 def GetTypeAndDeviceExpr(mount_point, info):
@@ -2963,8 +2967,7 @@
     if p.slotselect:
       device_expr = 'add_slot_suffix(%s)' % device_expr
     return (PARTITION_TYPES[fstab[mount_point].fs_type], device_expr)
-  else:
-    raise KeyError
+  raise KeyError
 
 
 def GetEntryForDevice(fstab, device):
@@ -2979,6 +2982,7 @@
       return fstab[mount_point]
   return None
 
+
 def ParseCertificate(data):
   """Parses and converts a PEM-encoded certificate into DER-encoded.
 
@@ -3305,7 +3309,7 @@
       for p, u in self._partition_updates.items():
         if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
           u.block_difference.WritePostInstallVerifyScript(script)
-          script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+          script.AppendExtra('unmap_partition("%s");' % p)  # ignore errors
 
     for p, u in self._partition_updates.items():
       if u.tgt_size and u.src_size <= u.tgt_size:
@@ -3313,7 +3317,7 @@
         u.block_difference.WriteScript(script, output_zip, progress=u.progress,
                                        write_verify_script=write_verify_script)
         if write_verify_script:
-          script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+          script.AppendExtra('unmap_partition("%s");' % p)  # ignore errors
 
     script.Comment('--- End patching dynamic partitions ---')
 
@@ -3370,7 +3374,8 @@
 
     for p, u in self._partition_updates.items():
       if u.tgt_size and u.src_size < u.tgt_size:
-        comment('Grow partition %s from %d to %d' % (p, u.src_size, u.tgt_size))
+        comment('Grow partition %s from %d to %d' %
+                (p, u.src_size, u.tgt_size))
         append('resize %s %d' % (p, u.tgt_size))
 
     for p, u in self._partition_updates.items():
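
[Reviewer note — illustrative sketch, not part of the patch] Two behavioral fixes in common.py are easy to miss among the autopep8-style cleanups: sha256() on Python 3 hashes bytes, so the partition fingerprint must be encoded first, and resolving sys.argv[0] with realpath() lets the default search path follow a symlinked entry point. A minimal standalone sketch, with an invented fingerprint value:

    import os
    import sys
    from hashlib import sha256

    # Python 3's sha256() rejects str input; encode to bytes before hashing.
    fingerprint = "brand/product/device:11/ID/1234:user/release-keys"  # invented
    salt = sha256(fingerprint.encode()).hexdigest()

    # realpath() resolves a symlinked entry point to the real executable
    # before its grandparent directory is taken as the default search path.
    exec_path = os.path.realpath(sys.argv[0])
    search_path = os.path.dirname(os.path.dirname(exec_path))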
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 3b68439..7fb0a77 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -221,8 +221,10 @@
 import check_target_files_vintf
 import common
 import edify_generator
+import target_files_diff
 import verity_utils
 
+
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
   sys.exit(1)
@@ -545,10 +547,10 @@
     target_files_dir = "SYSTEM/vendor"
 
   patch = "%s/recovery-from-boot.p" % target_files_dir
-  img = "%s/etc/recovery.img" %target_files_dir
+  img = "%s/etc/recovery.img" % target_files_dir
 
-  namelist = [name for name in target_files_zip.namelist()]
-  return (patch in namelist or img in namelist)
+  namelist = target_files_zip.namelist()
+  return patch in namelist or img in namelist
 
 
 def HasPartition(target_files_zip, partition):
@@ -626,7 +628,8 @@
 
   def GetIncrementalBlockDifferenceForPartition(name):
     if not HasPartition(source_zip, name):
-      raise RuntimeError("can't generate incremental that adds {}".format(name))
+      raise RuntimeError(
+          "can't generate incremental that adds {}".format(name))
 
     partition_src = common.GetUserImage(name, OPTIONS.source_tmp, source_zip,
                                         info_dict=source_info,
@@ -637,8 +640,7 @@
     partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip,
                                         info_dict=target_info,
                                         allow_shared_blocks=allow_shared_blocks,
-                                        hashtree_info_generator=
-                                        hashtree_info_generator)
+                                        hashtree_info_generator=hashtree_info_generator)
 
     # Check the first block of the source system partition for remount R/W only
     # if the filesystem is ext4.
@@ -1450,7 +1452,7 @@
     fs = source_info["fstab"]["/misc"]
     assert fs.fs_type.upper() == "EMMC", \
         "two-step packages only supported on devices with EMMC /misc partitions"
-    bcb_dev = {"bcb_dev" : fs.device}
+    bcb_dev = {"bcb_dev": fs.device}
     common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
     script.AppendExtra("""
 if get_stage("%(bcb_dev)s") == "2/3" then
@@ -1668,7 +1670,7 @@
         partitions = [partition for partition in partitions if partition
                       not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
         output_list.append('{}={}'.format(key, ' '.join(partitions)))
-      elif key == 'virtual_ab' or key == "virtual_ab_retrofit":
+      elif key in ['virtual_ab', 'virtual_ab_retrofit']:
         # Remove virtual_ab flag from secondary payload so that OTA client
         # don't use snapshots for secondary update
         pass
@@ -1712,7 +1714,8 @@
           partition_list = f.read().splitlines()
         partition_list = [partition for partition in partition_list if partition
                           and partition not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
-        common.ZipWriteStr(target_zip, info.filename, '\n'.join(partition_list))
+        common.ZipWriteStr(target_zip, info.filename,
+                           '\n'.join(partition_list))
       # Remove the unnecessary partitions from the dynamic partitions list.
       elif (info.filename == 'META/misc_info.txt' or
             info.filename == DYNAMIC_PARTITION_INFO):
@@ -1795,7 +1798,8 @@
       "{} is in super_block_devices but not in {}".format(
           super_device_not_updated, AB_PARTITIONS)
   # ab_partitions -= (dynamic_partition_list - super_block_devices)
-  new_ab_partitions = common.MakeTempFile(prefix="ab_partitions", suffix=".txt")
+  new_ab_partitions = common.MakeTempFile(
+      prefix="ab_partitions", suffix=".txt")
   with open(new_ab_partitions, 'w') as f:
     for partition in ab_partitions:
       if (partition in dynamic_partition_list and
@@ -1985,7 +1989,7 @@
     OPTIONS.source_tmp = common.UnzipTemp(
         OPTIONS.incremental_source, UNZIP_PATTERN)
     with zipfile.ZipFile(target_file) as input_zip, \
-        zipfile.ZipFile(source_file) as source_zip:
+            zipfile.ZipFile(source_file) as source_zip:
       WriteBlockIncrementalOTAPackage(
           input_zip,
           source_zip,
@@ -2245,7 +2249,6 @@
         OPTIONS.incremental_source, TARGET_DIFFING_UNZIP_PATTERN)
 
     with open(OPTIONS.log_diff, 'w') as out_file:
-      import target_files_diff
       target_files_diff.recursiveDiff(
           '', source_dir, target_dir, out_file)
 
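
[Reviewer note — illustrative sketch, not part of the patch] The HasRecoveryPatch tidy-up above drops a redundant list comprehension: ZipFile.namelist() already returns a list of member names, so membership can be tested on it directly. A throwaway-archive sketch:

    import zipfile

    # Build a one-member archive purely for demonstration.
    with zipfile.ZipFile("demo.zip", "w") as zf:
        zf.writestr("SYSTEM/recovery-from-boot.p", b"patch data")

    with zipfile.ZipFile("demo.zip") as target_files_zip:
        namelist = target_files_zip.namelist()  # already a list; no copy needed
        print("SYSTEM/recovery-from-boot.p" in namelist)  # True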
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index c2bfd32..5d10c40 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -608,22 +608,22 @@
     elif (OPTIONS.remove_avb_public_keys and
           (filename.startswith("BOOT/RAMDISK/avb/") or
            filename.startswith("BOOT/RAMDISK/first_stage_ramdisk/avb/"))):
-        matched_removal = False
-        for key_to_remove in OPTIONS.remove_avb_public_keys:
-          if filename.endswith(key_to_remove):
-            matched_removal = True
-            print("Removing AVB public key from ramdisk: %s" % filename)
-            break
-        if not matched_removal:
-          # Copy it verbatim if we don't want to remove it.
-          common.ZipWriteStr(output_tf_zip, out_info, data)
+      matched_removal = False
+      for key_to_remove in OPTIONS.remove_avb_public_keys:
+        if filename.endswith(key_to_remove):
+          matched_removal = True
+          print("Removing AVB public key from ramdisk: %s" % filename)
+          break
+      if not matched_removal:
+        # Copy it verbatim if we don't want to remove it.
+        common.ZipWriteStr(output_tf_zip, out_info, data)
 
     # Skip verity keyid (for system_root_image use) if we will replace it.
     elif OPTIONS.replace_verity_keyid and filename == "BOOT/cmdline":
       pass
 
     # Skip the care_map as we will regenerate the system/vendor images.
-    elif filename == "META/care_map.pb" or filename == "META/care_map.txt":
+    elif filename in ["META/care_map.pb", "META/care_map.txt"]:
       pass
 
     # Updates system_other.avbpubkey in /product/etc/.
@@ -967,11 +967,10 @@
     if extra_args:
       print('Setting extra AVB signing args for %s to "%s"' % (
           partition, extra_args))
-      if partition in AVB_FOOTER_ARGS_BY_PARTITION:
-        args_key = AVB_FOOTER_ARGS_BY_PARTITION[partition]
-      else:
-        # custom partition
-        args_key = "avb_{}_add_hashtree_footer_args".format(partition)
+      args_key = AVB_FOOTER_ARGS_BY_PARTITION.get(
+          partition,
+          # custom partition
+          "avb_{}_add_hashtree_footer_args".format(partition))
       misc_info[args_key] = (misc_info.get(args_key, '') + ' ' + extra_args)
 
   for partition in AVB_FOOTER_ARGS_BY_PARTITION:
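
[Reviewer note — illustrative sketch, not part of the patch] The AVB footer-args change collapses an if/else lookup into dict.get() with a computed default, so custom partitions absent from AVB_FOOTER_ARGS_BY_PARTITION fall through to the generic avb_<partition>_add_hashtree_footer_args key. A minimal sketch with an invented partition name and a trimmed-down map:

    AVB_FOOTER_ARGS_BY_PARTITION = {  # illustrative subset of the real map
        "system": "avb_system_add_hashtree_footer_args",
    }

    partition = "oem"  # invented custom partition, absent from the map
    # get() returns the mapped key when present, else the computed default.
    args_key = AVB_FOOTER_ARGS_BY_PARTITION.get(
        partition, "avb_{}_add_hashtree_footer_args".format(partition))
    print(args_key)  # avb_oem_add_hashtree_footer_args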
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 69be511..ac469eb 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -371,6 +371,17 @@
             partition, info_dict, key_file)
         cmd.extend(['--expected_chain_partition', chained_partition_arg])
 
+    # Handle the boot image with a non-default name, e.g. boot-5.4.img
+    boot_images = info_dict.get("boot_images")
+    if boot_images:
+      # We use the first boot image to generate vbmeta. Rename it to
+      # boot.img so that avbtool can find it correctly.
+      first_image_name = boot_images.split()[0]
+      first_image_path = os.path.join(input_tmp, 'IMAGES', first_image_name)
+      assert os.path.isfile(first_image_path)
+      renamed_boot_image_path = os.path.join(input_tmp, 'IMAGES', 'boot.img')
+      os.rename(first_image_path, renamed_boot_image_path)
+
     proc = common.Run(cmd)
     stdoutdata, _ = proc.communicate()
     assert proc.returncode == 0, \
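
[Reviewer note — illustrative sketch, not part of the patch] The new rename step in validate_target_files handles builds whose misc_info boot_images entry leads with a kernel-versioned image (e.g. boot-5.4.img): vbmeta was generated from that first image, so it must be exposed under the default boot.img name for avbtool to find during verification. A standalone sketch of the logic:

    import os
    import tempfile

    input_tmp = tempfile.mkdtemp()
    os.makedirs(os.path.join(input_tmp, "IMAGES"))
    # Stand-in for a kernel-versioned boot image in the extracted target files.
    open(os.path.join(input_tmp, "IMAGES", "boot-5.4.img"), "wb").close()

    boot_images = "boot-5.4.img"  # illustrative value of info_dict["boot_images"]
    first_image_name = boot_images.split()[0]
    first_image_path = os.path.join(input_tmp, "IMAGES", first_image_name)
    assert os.path.isfile(first_image_path)
    os.rename(first_image_path, os.path.join(input_tmp, "IMAGES", "boot.img"))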