Merge "copy kernel modules to ramdisk, add modules.load[.recovery]"
diff --git a/core/Makefile b/core/Makefile
index 73ad716..e921dfa 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1148,6 +1148,7 @@
 # Remove when b/63676296 is resolved.
 $(error Prebuilt bootimage is only supported for AB targets)
 endif
+INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_BOOTIMAGE),$(INSTALLED_BOOTIMAGE_TARGET)))
 else # BOARD_PREBUILT_BOOTIMAGE not defined
 INSTALLED_BOOTIMAGE_TARGET :=
@@ -1370,25 +1371,25 @@
 ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
   INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
 endif
-
-INTERNAL_USERIMAGES_DEPS := $(SIMG2IMG)
-INTERNAL_USERIMAGES_DEPS += $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(E2FSCK) $(TUNE2FS)
-ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
-INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG) $(MAKE_F2FS)
-endif
-
-ifeq ($(BOARD_AVB_ENABLE),true)
-INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
-endif
-
 ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
   INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
 endif
-ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE) $(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
-INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
+
+INTERNAL_USERIMAGES_DEPS := \
+    $(BLK_ALLOC_TO_BASE_FS) \
+    $(E2FSCK) \
+    $(MKE2FS_CONF) \
+    $(MKEXTUSERIMG) \
+    $(SIMG2IMG) \
+    $(TUNE2FS)
+
+ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
+INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG)
 endif
 
-INTERNAL_USERIMAGES_BINARY_PATHS := $(sort $(dir $(INTERNAL_USERIMAGES_DEPS)))
+ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_PRODUCT_SERVICESIMAGE_FILE_SYSTEM_TYPE) $(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
+INTERNAL_USERIMAGES_DEPS += $(MKSQUASHFSUSERIMG)
+endif
 
 ifeq (true,$(PRODUCT_SUPPORTS_VERITY))
 INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_METADATA) $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
@@ -1397,13 +1398,16 @@
 endif
 endif
 
+ifeq ($(BOARD_AVB_ENABLE),true)
+INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
+endif
+
+# Get a colon-separated list of search paths.
+INTERNAL_USERIMAGES_BINARY_PATHS := $(subst $(space),:,$(sort $(dir $(INTERNAL_USERIMAGES_DEPS))))
+
 SELINUX_FC := $(call intermediates-dir-for,ETC,file_contexts.bin)/file_contexts.bin
 INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
 
-INTERNAL_USERIMAGES_DEPS += $(BLK_ALLOC_TO_BASE_FS)
-
-INTERNAL_USERIMAGES_DEPS += $(MKE2FS_CONF)
-
 ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
 
 ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
@@ -2311,7 +2315,7 @@
   @mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
   $(call generate-image-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt,system, \
       skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) $(TARGET_OUT) \
       || ( mkdir -p $${DIST_DIR}; cp $(INSTALLED_FILES_FILE) $${DIST_DIR}/installed-files-rescued.txt; \
@@ -2559,7 +2563,7 @@
   @mkdir -p $(TARGET_OUT_DATA)
   @mkdir -p $(userdataimage_intermediates) && rm -rf $(userdataimage_intermediates)/userdata_image_info.txt
   $(call generate-image-prop-dictionary, $(userdataimage_intermediates)/userdata_image_info.txt,userdata,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_DATA) $(userdataimage_intermediates)/userdata_image_info.txt $(INSTALLED_USERDATAIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE))
@@ -2665,7 +2669,7 @@
   @mkdir -p $(TARGET_OUT_CACHE)
   @mkdir -p $(cacheimage_intermediates) && rm -rf $(cacheimage_intermediates)/cache_image_info.txt
   $(call generate-image-prop-dictionary, $(cacheimage_intermediates)/cache_image_info.txt,cache,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_CACHE) $(cacheimage_intermediates)/cache_image_info.txt $(INSTALLED_CACHEIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE))
@@ -2736,7 +2740,7 @@
   @mkdir -p $(TARGET_OUT_SYSTEM_OTHER)
   @mkdir -p $(systemotherimage_intermediates) && rm -rf $(systemotherimage_intermediates)/system_other_image_info.txt
   $(call generate-image-prop-dictionary, $(systemotherimage_intermediates)/system_other_image_info.txt,system,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_SYSTEM_OTHER) $(systemotherimage_intermediates)/system_other_image_info.txt $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMOTHERIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
@@ -2874,7 +2878,7 @@
   $(call create-vendor-odm-symlink)
   @mkdir -p $(vendorimage_intermediates) && rm -rf $(vendorimage_intermediates)/vendor_image_info.txt
   $(call generate-image-prop-dictionary, $(vendorimage_intermediates)/vendor_image_info.txt,vendor,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       build/make/tools/releasetools/build_image.py \
       $(TARGET_OUT_VENDOR) $(vendorimage_intermediates)/vendor_image_info.txt $(INSTALLED_VENDORIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_VENDORIMAGE_PARTITION_SIZE))
@@ -2929,7 +2933,7 @@
   @mkdir -p $(TARGET_OUT_PRODUCT)
   @mkdir -p $(productimage_intermediates) && rm -rf $(productimage_intermediates)/product_image_info.txt
   $(call generate-image-prop-dictionary, $(productimage_intermediates)/product_image_info.txt,product,skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_PRODUCT) $(productimage_intermediates)/product_image_info.txt $(INSTALLED_PRODUCTIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCTIMAGE_TARGET),$(BOARD_PRODUCTIMAGE_PARTITION_SIZE))
@@ -3027,7 +3031,7 @@
   @mkdir -p $(TARGET_OUT_PRODUCT_SERVICES)
   @mkdir -p $(product_servicesimage_intermediates) && rm -rf $(product_servicesimage_intermediates)/product_services_image_info.txt
   $(call generate-image-prop-dictionary, $(product_servicesimage_intermediates)/product_services_image_info.txt,product_services, skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_PRODUCT_SERVICES) $(product_servicesimage_intermediates)/product_services_image_info.txt $(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCT_SERVICESIMAGE_TARGET),$(BOARD_PRODUCT_SERVICESIMAGE_PARTITION_SIZE))
@@ -3078,7 +3082,7 @@
   @mkdir -p $(TARGET_OUT_ODM)
   @mkdir -p $(odmimage_intermediates) && rm -rf $(odmimage_intermediates)/odm_image_info.txt
   $(call generate-userimage-prop-dictionary, $(odmimage_intermediates)/odm_image_info.txt, skip_fsck=true)
-  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_ODM) $(odmimage_intermediates)/odm_image_info.txt $(INSTALLED_ODMIMAGE_TARGET) $(TARGET_OUT)
   $(hide) $(call assert-max-image-size,$(INSTALLED_ODMIMAGE_TARGET),$(BOARD_ODMIMAGE_PARTITION_SIZE))
@@ -3675,6 +3679,7 @@
   imgdiff \
   libconscrypt_openjdk_jni \
   lpmake \
+  lpunpack \
   make_f2fs \
   minigzip \
   mkbootfs \
@@ -4165,7 +4170,7 @@
 	$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 ifdef BUILDING_SYSTEM_IMAGE
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	    build/make/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
 endif # BUILDING_SYSTEM_IMAGE
 endif
@@ -4289,7 +4294,7 @@
 	        echo "$(group)_partition_list=$(_group_partition_list)" >> $(zip_root)/META/dynamic_partitions_info.txt;))
 endif # BOARD_SUPER_PARTITION_GROUPS
 	@# TODO(b/134525174): Remove `-r` after addressing the issue with recovery patch generation.
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	    build/make/tools/releasetools/add_img_to_target_files -a -r -v -p $(HOST_OUT) $(zip_root)
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
@@ -4320,14 +4325,14 @@
 # $(1): output file
 # $(2): additional args
 define build-ota-package-target
-PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
-   build/make/tools/releasetools/ota_from_target_files -v \
-   --block \
-   --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
-   -p $(HOST_OUT) \
-   $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \
-   $(2) \
-   $(BUILT_TARGET_FILES_PACKAGE) $(1)
+PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
+    build/make/tools/releasetools/ota_from_target_files \
+    --verbose \
+    --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
+    --path $(HOST_OUT) \
+    $(if $(OEM_OTA_CONFIG), --oem_settings $(OEM_OTA_CONFIG)) \
+    $(2) \
+    $(BUILT_TARGET_FILES_PACKAGE) $(1)
 endef
 
 name := $(TARGET_PRODUCT)
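
Note: the core/Makefile hunks above fold the tool directories into a single colon-separated INTERNAL_USERIMAGES_BINARY_PATHS, so every recipe can prefix PATH with one variable instead of repeating the old $(foreach p,...,$(p):) loop. A minimal Python sketch of the equivalent transformation, using hypothetical tool locations in place of $(INTERNAL_USERIMAGES_DEPS):

    import os

    # Hypothetical stand-ins for $(INTERNAL_USERIMAGES_DEPS).
    deps = [
        "out/host/linux-x86/bin/e2fsck",
        "out/host/linux-x86/bin/simg2img",
        "system/extras/ext4_utils/mke2fs.conf",
    ]

    # $(sort $(dir ...)): unique, sorted directory names (trailing '/').
    dirs = sorted({os.path.dirname(p) + "/" for p in deps})
    # $(subst $(space),:,...): one colon-separated string, ready to prefix PATH.
    binary_paths = ":".join(dirs)
    path = binary_paths + ":" + os.environ.get("PATH", "")
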
diff --git a/core/aapt2.mk b/core/aapt2.mk
index fbbf3dd..7b17df4 100644
--- a/core/aapt2.mk
+++ b/core/aapt2.mk
@@ -61,8 +61,8 @@
 
 # Always set --pseudo-localize, it will be stripped out later for release
 # builds that don't want it.
-$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_resources_flata): \
-  PRIVATE_AAPT2_CFLAGS := --pseudo-localize
+$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_resources_flata) $(my_generated_resources_flata) $(my_zippped_resources_flata): \
+  PRIVATE_AAPT2_CFLAGS := --pseudo-localize $(filter --legacy,$(LOCAL_AAPT_FLAGS))
 
 # TODO(b/78447299): Forbid LOCAL_STATIC_JAVA_AAR_LIBRARIES in aapt2 and remove
 # support for it.
diff --git a/core/config.mk b/core/config.mk
index 92efd34..4ce664a 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -577,16 +577,12 @@
 endif
 APICHECK := $(HOST_OUT_JAVA_LIBRARIES)/metalava$(COMMON_JAVA_PACKAGE_SUFFIX)
 FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX)
-MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/mke2fs$(HOST_EXECUTABLE_SUFFIX)
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
 MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
 BLK_ALLOC_TO_BASE_FS := $(HOST_OUT_EXECUTABLES)/blk_alloc_to_base_fs$(HOST_EXECUTABLE_SUFFIX)
-MAKE_SQUASHFS := $(HOST_OUT_EXECUTABLES)/mksquashfs$(HOST_EXECUTABLE_SUFFIX)
 MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
-MAKE_F2FS := $(HOST_OUT_EXECUTABLES)/make_f2fs$(HOST_EXECUTABLE_SUFFIX)
 MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
 SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
-IMG2SIMG := $(HOST_OUT_EXECUTABLES)/img2simg$(HOST_EXECUTABLE_SUFFIX)
 E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
 MKTARBALL := build/make/tools/mktarball.sh
 TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
diff --git a/core/definitions.mk b/core/definitions.mk
index 4017c47..c5e8456 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1883,20 +1883,20 @@
 ###########################################################
 define aapt2-compile-one-resource-file
 @mkdir -p $(dir $@)
-$(hide) $(AAPT2) compile -o $(dir $@) $(PRIVATE_AAPT2_CFLAGS) --legacy $<
+$(hide) $(AAPT2) compile -o $(dir $@) $(PRIVATE_AAPT2_CFLAGS) $<
 endef
 
 define aapt2-compile-resource-dirs
 @mkdir -p $(dir $@)
 $(hide) $(AAPT2) compile -o $@ $(addprefix --dir ,$(PRIVATE_SOURCE_RES_DIRS)) \
-  $(PRIVATE_AAPT2_CFLAGS) --legacy
+  $(PRIVATE_AAPT2_CFLAGS)
 endef
 
 # TODO(b/74574557): use aapt2 compile --zip if it gets implemented
 define aapt2-compile-resource-zips
 @mkdir -p $(dir $@)
 $(ZIPSYNC) -d $@.contents -l $@.list $(PRIVATE_SOURCE_RES_ZIPS)
-$(hide) $(AAPT2) compile -o $@ --dir $@.contents $(PRIVATE_AAPT2_CFLAGS) --legacy
+$(hide) $(AAPT2) compile -o $@ --dir $@.contents $(PRIVATE_AAPT2_CFLAGS)
 endef
 
 # Set up rule to compile one resource file with aapt2.
@@ -2450,7 +2450,7 @@
 # Host init verifier doesn't exist on darwin.
 ifneq ($(HOST_OS),darwin)
 $(2): $(1) $(HOST_INIT_VERIFIER) $(call intermediates-dir-for,ETC,passwd)/passwd
-	$(hide) $(HOST_INIT_VERIFIER) $$< $(call intermediates-dir-for,ETC,passwd)/passwd
+	$(hide) $(HOST_INIT_VERIFIER) -p $(call intermediates-dir-for,ETC,passwd)/passwd $$<
 else
 $(2): $(1)
 endif
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 5693147..570dbd8 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -138,9 +138,7 @@
   $(call end_json_map)
 
   $(call add_json_str,  DirtyImageObjects,                  $(DIRTY_IMAGE_OBJECTS))
-  $(call add_json_str,  PreloadedClasses,                   $(PRELOADED_CLASSES))
   $(call add_json_list, BootImageProfiles,                  $(PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION))
-  $(call add_json_bool, UseProfileForBootImage,             $(call invert_bool,$(filter false,$(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE))))
   $(call add_json_str,  BootFlags,                          $(PRODUCT_DEX_PREOPT_BOOT_FLAGS))
   $(call add_json_str,  Dex2oatImageXmx,                    $(DEX2OAT_IMAGE_XMX))
   $(call add_json_str,  Dex2oatImageXms,                    $(DEX2OAT_IMAGE_XMS))
diff --git a/core/main.mk b/core/main.mk
index 5cb1d34..73aa649 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -832,6 +832,9 @@
 $(call update-host-shared-libs-deps-for-suites)
 ifdef HOST_CROSS_OS
 $(call resolve-shared-libs-depes,HOST_CROSS_,,true)
+ifdef HOST_CROSS_2ND_ARCH
+$(call resolve-shared-libs-depes,HOST_CROSS_,true,true)
+endif
 endif
 
 # Pass the shared libraries dependencies to prebuilt ELF file check.
diff --git a/core/package_internal.mk b/core/package_internal.mk
index d693c58..c414295 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -328,7 +328,7 @@
 built_apk_splits := $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk)
 endif
 
-$(R_file_stamp) $(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
+$(R_file_stamp) $(my_res_package): PRIVATE_AAPT_FLAGS := $(filter-out --legacy,$(LOCAL_AAPT_FLAGS))
 $(R_file_stamp) $(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS := $(TARGET_AAPT_CHARACTERISTICS)
 $(R_file_stamp) $(my_res_package): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
 $(R_file_stamp) $(my_res_package): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
@@ -346,7 +346,6 @@
 my_full_asset_paths := $(all_assets)
 
 # Add AAPT2 link specific flags.
-$(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
 ifndef LOCAL_AAPT_NAMESPACES
   $(my_res_package): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
 endif
@@ -416,14 +415,6 @@
 
 $(my_res_package) : $(all_library_res_package_export_deps)
 
-# These four are set above for $(R_stamp_file) and $(my_res_package), but
-# $(LOCAL_BUILT_MODULE) is not set before java.mk, so they have to be set again
-# here.
-$(LOCAL_BUILT_MODULE): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_AAPT_CHARACTERISTICS := $(TARGET_AAPT_CHARACTERISTICS)
-$(LOCAL_BUILT_MODULE): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
-$(LOCAL_BUILT_MODULE): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
-
 ifneq ($(full_classes_jar),)
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
 # Use the jarjar processed archive as the initial package file.
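
Note: taken together, the aapt2.mk, definitions.mk, and package_internal.mk hunks stop hardcoding --legacy in the aapt2 compile macros and route it per module instead: a module that puts --legacy in LOCAL_AAPT_FLAGS gets it at compile time, and the flag is filtered out of what reaches the link step. A minimal sketch of that routing, with a hypothetical flag list:

    # Hypothetical LOCAL_AAPT_FLAGS for one module.
    local_aapt_flags = ["--legacy", "--no-version-vectors"]

    # aapt2.mk: PRIVATE_AAPT2_CFLAGS keeps --pseudo-localize plus the module's
    # --legacy, if any ($(filter --legacy,$(LOCAL_AAPT_FLAGS))).
    compile_flags = ["--pseudo-localize"] + [
        f for f in local_aapt_flags if f == "--legacy"]

    # package_internal.mk: the flags for the link step drop it
    # ($(filter-out --legacy,$(LOCAL_AAPT_FLAGS))).
    link_flags = [f for f in local_aapt_flags if f != "--legacy"]
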
diff --git a/core/product.mk b/core/product.mk
index 838673c..9ec3257 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -541,10 +541,15 @@
 endef
 
 #
-# Strip the variables in _product_strip_var_list
+# Strip the variables in _product_var_list and a few build-system
+# internal variables, and assign the ones for the current product
+# to a shorthand that is more convenient to read from elsewhere.
 #
 define strip-product-vars
-$(foreach v,$(_product_var_list), \
+$(foreach v,\
+  $(_product_var_list) \
+    PRODUCT_ENFORCE_PACKAGES_EXIST \
+    PRODUCT_ENFORCE_PACKAGES_EXIST_WHITELIST, \
   $(eval $(v) := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).$(v)))) \
 )
 endef
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
index e9c506a..489feeb 100644
--- a/core/tasks/oem_image.mk
+++ b/core/tasks/oem_image.mk
@@ -34,7 +34,7 @@
 	@mkdir -p $(TARGET_OUT_OEM)
 	@mkdir -p $(oemimage_intermediates) && rm -rf $(oemimage_intermediates)/oem_image_info.txt
 	$(call generate-image-prop-dictionary, $(oemimage_intermediates)/oem_image_info.txt,oem,skip_fsck=true)
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
 	  build/make/tools/releasetools/build_image.py \
 	  $(TARGET_OUT_OEM) $(oemimage_intermediates)/oem_image_info.txt $@ $(TARGET_OUT)
 	$(hide) $(call assert-max-image-size,$@,$(BOARD_OEMIMAGE_PARTITION_SIZE))
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index a58019e..b0d1a0c 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -151,7 +151,7 @@
 	# Generate the image.
 	$(if $(filter oem,$(PRIVATE_MOUNT_POINT)), \
 	  $(hide) echo "oem.buildnumber=$(BUILD_NUMBER_FROM_FILE)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
-	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
 	  build/make/tools/releasetools/build_image.py \
 	  $(PRIVATE_STAGING_DIR) $(PRIVATE_INTERMEDIATES)/image_info.txt $@ $(TARGET_OUT)
 
diff --git a/envsetup.sh b/envsetup.sh
index 2fa5660..941c5f7 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -768,218 +768,6 @@
     fi
 }
 
-function m()
-{
-    local T=$(gettop)
-    if [ "$T" ]; then
-        _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
-    else
-        echo "Couldn't locate the top of the tree.  Try setting TOP."
-        return 1
-    fi
-}
-
-function findmakefile()
-{
-    local TOPFILE=build/make/core/envsetup.mk
-    local HERE=$PWD
-    if [ "$1" ]; then
-        \cd $1
-    fi;
-    local T=
-    while [ \( ! \( -f $TOPFILE \) \) -a \( $PWD != "/" \) ]; do
-        T=`PWD= /bin/pwd`
-        if [ -f "$T/Android.mk" -o -f "$T/Android.bp" ]; then
-            echo $T/Android.mk
-            \cd $HERE
-            return
-        fi
-        \cd ..
-    done
-    \cd $HERE
-    return 1
-}
-
-function mm()
-{
-    local T=$(gettop)
-    # If we're sitting in the root of the build tree, just do a
-    # normal build.
-    if [ -f build/soong/soong_ui.bash ]; then
-        _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
-    else
-        # Find the closest Android.mk file.
-        local M=$(findmakefile)
-        local MODULES=
-        local GET_INSTALL_PATH=
-        local ARGS=
-        # Remove the path to top as the makefilepath needs to be relative
-        local M=`echo $M|sed 's:'$T'/::'`
-        if [ ! "$T" ]; then
-            echo "Couldn't locate the top of the tree.  Try setting TOP."
-            return 1
-        elif [ ! "$M" ]; then
-            echo "Couldn't locate a makefile from the current directory."
-            return 1
-        else
-            local ARG
-            for ARG in $@; do
-                case $ARG in
-                  GET-INSTALL-PATH) GET_INSTALL_PATH=$ARG;;
-                esac
-            done
-            if [ -n "$GET_INSTALL_PATH" ]; then
-              MODULES=
-              ARGS=GET-INSTALL-PATH-IN-$(dirname ${M})
-              ARGS=${ARGS//\//-}
-            else
-              MODULES=MODULES-IN-$(dirname ${M})
-              # Convert "/" to "-".
-              MODULES=${MODULES//\//-}
-              ARGS=$@
-            fi
-            if [ "1" = "${WITH_TIDY_ONLY}" -o "true" = "${WITH_TIDY_ONLY}" ]; then
-              MODULES=tidy_only
-            fi
-            ONE_SHOT_MAKEFILE=$M _wrap_build $T/build/soong/soong_ui.bash --make-mode $MODULES $ARGS
-        fi
-    fi
-}
-
-function mmm()
-{
-    local T=$(gettop)
-    if [ "$T" ]; then
-        local MAKEFILE=
-        local MODULES=
-        local MODULES_IN_PATHS=
-        local ARGS=
-        local DIR TO_CHOP
-        local DIR_MODULES
-        local GET_INSTALL_PATH=
-        local GET_INSTALL_PATHS=
-        local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/')
-        local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/')
-        for DIR in $DIRS ; do
-            DIR_MODULES=`echo $DIR | sed -n -e 's/.*:\(.*$\)/\1/p' | sed 's/,/ /'`
-            DIR=`echo $DIR | sed -e 's/:.*//' -e 's:/$::'`
-            # Remove the leading ./ and trailing / if any exists.
-            DIR=${DIR#./}
-            DIR=${DIR%/}
-            local M
-            if [ "$DIR_MODULES" = "" ]; then
-                M=$(findmakefile $DIR)
-            else
-                # Only check the target directory if a module is specified.
-                if [ -f $DIR/Android.mk -o -f $DIR/Android.bp ]; then
-                    local HERE=$PWD
-                    cd $DIR
-                    M=`PWD= /bin/pwd`
-                    M=$M/Android.mk
-                    cd $HERE
-                fi
-            fi
-            if [ "$M" ]; then
-                # Remove the path to top as the makefilepath needs to be relative
-                local M=`echo $M|sed 's:'$T'/::'`
-                if [ "$DIR_MODULES" = "" ]; then
-                    MODULES_IN_PATHS="$MODULES_IN_PATHS MODULES-IN-$(dirname ${M})"
-                    GET_INSTALL_PATHS="$GET_INSTALL_PATHS GET-INSTALL-PATH-IN-$(dirname ${M})"
-                else
-                    MODULES="$MODULES $DIR_MODULES"
-                fi
-                MAKEFILE="$MAKEFILE $M"
-            else
-                case $DIR in
-                  showcommands | snod | dist | *=*) ARGS="$ARGS $DIR";;
-                  GET-INSTALL-PATH) GET_INSTALL_PATH=$DIR;;
-                  *) if [ -d $DIR ]; then
-                         echo "No Android.mk in $DIR.";
-                     else
-                         echo "Couldn't locate the directory $DIR";
-                     fi
-                     return 1;;
-                esac
-            fi
-        done
-        if [ -n "$GET_INSTALL_PATH" ]; then
-          ARGS=${GET_INSTALL_PATHS//\//-}
-          MODULES=
-          MODULES_IN_PATHS=
-        fi
-        if [ "1" = "${WITH_TIDY_ONLY}" -o "true" = "${WITH_TIDY_ONLY}" ]; then
-          MODULES=tidy_only
-          MODULES_IN_PATHS=
-        fi
-        # Convert "/" to "-".
-        MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-        ONE_SHOT_MAKEFILE="$MAKEFILE" _wrap_build $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $MODULES $MODULES_IN_PATHS $ARGS
-    else
-        echo "Couldn't locate the top of the tree.  Try setting TOP."
-        return 1
-    fi
-}
-
-function mma()
-{
-  local T=$(gettop)
-  if [ -f build/soong/soong_ui.bash ]; then
-    _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
-  else
-    if [ ! "$T" ]; then
-      echo "Couldn't locate the top of the tree.  Try setting TOP."
-      return 1
-    fi
-    local M=$(findmakefile || echo $(realpath $PWD)/Android.mk)
-    # Remove the path to top as the makefilepath needs to be relative
-    local M=`echo $M|sed 's:'$T'/::'`
-    local MODULES_IN_PATHS=MODULES-IN-$(dirname ${M})
-    # Convert "/" to "-".
-    MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-    _wrap_build $T/build/soong/soong_ui.bash --make-mode $@ $MODULES_IN_PATHS
-  fi
-}
-
-function mmma()
-{
-  local T=$(gettop)
-  if [ "$T" ]; then
-    local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/')
-    local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/')
-    local MY_PWD=`PWD= /bin/pwd`
-    if [ "$MY_PWD" = "$T" ]; then
-      MY_PWD=
-    else
-      MY_PWD=`echo $MY_PWD|sed 's:'$T'/::'`
-    fi
-    local DIR=
-    local MODULES_IN_PATHS=
-    local ARGS=
-    for DIR in $DIRS ; do
-      if [ -d $DIR ]; then
-        # Remove the leading ./ and trailing / if any exists.
-        DIR=${DIR#./}
-        DIR=${DIR%/}
-        if [ "$MY_PWD" != "" ]; then
-          DIR=$MY_PWD/$DIR
-        fi
-        MODULES_IN_PATHS="$MODULES_IN_PATHS MODULES-IN-$DIR"
-      else
-        case $DIR in
-          showcommands | snod | dist | *=*) ARGS="$ARGS $DIR";;
-          *) echo "Couldn't find directory $DIR"; return 1;;
-        esac
-      fi
-    done
-    # Convert "/" to "-".
-    MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-    _wrap_build $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $ARGS $MODULES_IN_PATHS
-  else
-    echo "Couldn't locate the top of the tree.  Try setting TOP."
-    return 1
-  fi
-}
-
 function croot()
 {
     local T=$(gettop)
@@ -1665,6 +1453,41 @@
     return $ret
 }
 
+function _trigger_build()
+(
+    local -r bc="$1"; shift
+    if T="$(gettop)"; then
+      _wrap_build "$T/build/soong/soong_ui.bash" --build-mode --${bc} --dir="$(pwd)" "$@"
+    else
+      echo "Couldn't locate the top of the tree. Try setting TOP."
+    fi
+)
+
+function m()
+(
+    _trigger_build "all-modules" "$@"
+)
+
+function mm()
+(
+    _trigger_build "modules-in-a-dir-no-deps" "$@"
+)
+
+function mmm()
+(
+    _trigger_build "modules-in-dirs-no-deps" "$@"
+)
+
+function mma()
+(
+    _trigger_build "modules-in-a-dir" "$@"
+)
+
+function mmma()
+(
+    _trigger_build "modules-in-dirs" "$@"
+)
+
 function make()
 {
     _wrap_build $(get_make_command "$@") "$@"
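
Note: all five build helpers now funnel through _trigger_build, which hands target resolution to soong_ui itself via --build-mode and --dir instead of computing ONE_SHOT_MAKEFILE / MODULES-IN-* names in shell. A rough Python mirror of the dispatch, assuming an AOSP checkout at `top`:

    import os
    import subprocess

    def trigger_build(top, build_command, *targets):
        """Mirrors _trigger_build(): one soong_ui call, selected by mode."""
        cmd = [
            os.path.join(top, "build/soong/soong_ui.bash"),
            "--build-mode", "--" + build_command,
            "--dir=" + os.getcwd(),
        ] + list(targets)
        return subprocess.call(cmd)

    # trigger_build(top, "all-modules", "droid")        # what `m droid` runs
    # trigger_build(top, "modules-in-a-dir-no-deps")    # what `mm` runs
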
diff --git a/target/product/generic.mk b/target/product/generic.mk
index 6fe4818..68130e3 100644
--- a/target/product/generic.mk
+++ b/target/product/generic.mk
@@ -25,4 +25,5 @@
 PRODUCT_DEVICE := generic
 PRODUCT_NAME := generic
 
-$(call enforce-product-packages-exist,)
+whitelist := product_manifest.xml
+$(call enforce-product-packages-exist,$(whitelist))
diff --git a/target/product/mainline_arm64.mk b/target/product/mainline_arm64.mk
index 6050924..c098c9f 100644
--- a/target/product/mainline_arm64.mk
+++ b/target/product/mainline_arm64.mk
@@ -16,7 +16,8 @@
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/mainline.mk)
-$(call enforce-product-packages-exist,)
+whitelist := product_manifest.xml
+$(call enforce-product-packages-exist,$(whitelist))
 
 PRODUCT_NAME := mainline_arm64
 PRODUCT_DEVICE := mainline_arm64
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index 87393d4..f836bd3 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -99,8 +99,6 @@
 
 # Enable stats logging in LMKD
 TARGET_LMKD_STATS_LOG := true
-PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
-    ro.lmk.log_stats=true
 
 # Enable dynamic partition size
 PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
diff --git a/tools/mktarball.sh b/tools/mktarball.sh
index ef0fe86..ced7e17 100755
--- a/tools/mktarball.sh
+++ b/tools/mktarball.sh
@@ -37,7 +37,7 @@
 #    echo "$f: dir: $is_dir curr: $curr_perms uid: $new_uid gid: $new_gid "\
 #         "perms: $new_perms"
     tar --no-recursion --numeric-owner --owner $new_uid \
-        --group $new_gid --mode $new_perms -p -rf ${target_tar} ${f}
+        --group $new_gid --mode $new_perms -rf ${target_tar} ${f}
 done
 
 if [ $? -eq 0 ] ; then
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index b5ae009..8cf3fab 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -44,6 +44,7 @@
         "ota_from_target_files.py",
         "ota_package_parser.py",
         "rangelib.py",
+        "sign_apex.py",
         "sign_target_files_apks.py",
         "sparse_img.py",
         "target_files_diff.py",
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index fb4ca76..0751125 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -59,9 +59,9 @@
   try:
     common.RunAndCheckOutput(cmd)
   except common.ExternalError as e:
-    raise ApexSigningError, \
+    raise ApexSigningError(
         'Failed to sign APEX payload {} with {}:\n{}'.format(
-            payload_file, payload_key_path, e), sys.exc_info()[2]
+            payload_file, payload_key_path, e))
 
   # Verify the signed payload image with specified public key.
   logger.info('Verifying %s', payload_file)
@@ -75,9 +75,9 @@
   try:
     common.RunAndCheckOutput(cmd)
   except common.ExternalError as e:
-    raise ApexSigningError, \
+    raise ApexSigningError(
         'Failed to validate payload signing for {} with {}:\n{}'.format(
-            payload_file, payload_key, e), sys.exc_info()[2]
+            payload_file, payload_key, e))
 
 
 def ParseApexPayloadInfo(payload_path):
@@ -100,9 +100,9 @@
   try:
     output = common.RunAndCheckOutput(cmd)
   except common.ExternalError as e:
-    raise ApexInfoError, \
+    raise ApexInfoError(
         'Failed to get APEX payload info for {}:\n{}'.format(
-            payload_path, e), sys.exc_info()[2]
+            payload_path, e))
 
   # Extract the Algorithm / Salt / Prop info from payload (i.e. an image signed
   # with avbtool). For example,
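
Note: the three apex_utils.py hunks replace the Python-2-only three-expression raise statement with a plain constructor call that parses under both Python 2 and 3; the explicit traceback chaining via sys.exc_info()[2] is dropped. A self-contained sketch of the pattern, with stand-in names:

    class ApexSigningError(Exception):
        pass

    def sign_payload(run_cmd):
        try:
            run_cmd()
        except RuntimeError as e:
            # Python 2 only (SyntaxError on Python 3):
            #   raise ApexSigningError, 'failed: %s' % e, sys.exc_info()[2]
            # Portable replacement used above:
            raise ApexSigningError(
                'Failed to sign APEX payload: {}'.format(e))
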
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 7d3424b..0d990f1 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -38,8 +38,7 @@
   """Check if the cert uses SHA-256 hashing algorithm."""
 
   cmd = ['openssl', 'x509', '-text', '-noout', '-in', cert]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  cert_dump, _ = p1.communicate()
+  cert_dump = common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
   algorithm = re.search(r'Signature Algorithm: ([a-zA-Z0-9]+)', cert_dump)
   assert algorithm, "Failed to identify the signature algorithm."
@@ -69,13 +68,13 @@
   print('Certificate: %s' % (cert,))
 
   # Read in the package.
-  with open(package) as package_file:
+  with open(package, 'rb') as package_file:
     package_bytes = package_file.read()
 
   length = len(package_bytes)
   assert length >= 6, "Not big enough to contain footer."
 
-  footer = [ord(x) for x in package_bytes[-6:]]
+  footer = bytearray(package_bytes[-6:])
   assert footer[2] == 0xff and footer[3] == 0xff, "Footer is wrong."
 
   signature_start_from_end = (footer[1] << 8) + footer[0]
@@ -111,31 +110,25 @@
 
   # Parse the signature and get the hash.
   cmd = ['openssl', 'asn1parse', '-inform', 'DER', '-in', sig_file]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  sig, _ = p1.communicate()
-  assert p1.returncode == 0, "Failed to parse the signature."
+  sig = common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
-  digest_line = sig.strip().split('\n')[-1]
+  digest_line = sig.rstrip().split('\n')[-1]
   digest_string = digest_line.split(':')[3]
   digest_file = common.MakeTempFile(prefix='digest-')
   with open(digest_file, 'wb') as f:
-    f.write(digest_string.decode('hex'))
+    f.write(bytearray.fromhex(digest_string))
 
  # Verify the digest by outputting the decrypted result in ASN.1 structure.
   decrypted_file = common.MakeTempFile(prefix='decrypted-')
   cmd = ['openssl', 'rsautl', '-verify', '-certin', '-inkey', cert,
          '-in', digest_file, '-out', decrypted_file]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  p1.communicate()
-  assert p1.returncode == 0, "Failed to run openssl rsautl -verify."
+  common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
   # Parse the output ASN.1 structure.
   cmd = ['openssl', 'asn1parse', '-inform', 'DER', '-in', decrypted_file]
-  p1 = common.Run(cmd, stdout=subprocess.PIPE)
-  decrypted_output, _ = p1.communicate()
-  assert p1.returncode == 0, "Failed to parse the output."
+  decrypted_output = common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)
 
-  digest_line = decrypted_output.strip().split('\n')[-1]
+  digest_line = decrypted_output.rstrip().split('\n')[-1]
   digest_string = digest_line.split(':')[3].lower()
 
   # Verify that the two digest strings match.
@@ -156,7 +149,7 @@
 
   # Dump pubkey from the certificate.
   pubkey = common.MakeTempFile(prefix="key-", suffix=".pem")
-  with open(pubkey, 'wb') as pubkey_fp:
+  with open(pubkey, 'w') as pubkey_fp:
     pubkey_fp.write(common.ExtractPublicKey(cert))
 
   package_dir = common.MakeTempDir(prefix='package-')
@@ -166,11 +159,7 @@
   cmd = ['delta_generator',
          '--in_file=' + payload_file,
          '--public_key=' + pubkey]
-  proc = common.Run(cmd)
-  stdoutdata, _ = proc.communicate()
-  assert proc.returncode == 0, \
-      'Failed to verify payload with delta_generator: {}\n{}'.format(
-          package, stdoutdata)
+  common.RunAndCheckOutput(cmd)
   common.ZipClose(package_zip)
 
   # Verified successfully upon reaching here.
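
Note: two of the Python 3 fixes above go together: the package is now opened in 'rb' so its contents are bytes, and the footer is wrapped in bytearray(), whose indexing yields ints on both Python 2 and 3 (replacing the ord() loop). A sketch of the 6-byte footer the script checks, assuming package_bytes holds the whole signed zip:

    # [0:2] signature start, counted back from end of file (little-endian)
    # [2:4] fixed 0xFFFF marker
    # [4:6] zip archive comment size (little-endian)
    footer = bytearray(package_bytes[-6:])
    assert footer[2] == 0xff and footer[3] == 0xff, "Footer is wrong."
    signature_start_from_end = (footer[1] << 8) + footer[0]
    comment_size = (footer[5] << 8) + footer[4]
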
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index 4b0d4c7..60200a3 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -39,8 +39,11 @@
 
 """
 
+from __future__ import print_function
+
 import logging
 import os
+import os.path
 import re
 import subprocess
 import sys
@@ -49,7 +52,7 @@
 import common
 
 if sys.hexversion < 0x02070000:
-  print >> sys.stderr, "Python 2.7 or newer is required."
+  print("Python 2.7 or newer is required.", file=sys.stderr)
   sys.exit(1)
 
 
@@ -65,8 +68,10 @@
 class MyZipInfo(zipfile.ZipInfo):
   def _decodeExtra(self):
     pass
+
 zipfile.ZipInfo = MyZipInfo
 
+
 OPTIONS = common.OPTIONS
 
 OPTIONS.text = False
@@ -76,28 +81,34 @@
 PROBLEMS = []
 PROBLEM_PREFIX = []
 
+
 def AddProblem(msg):
   PROBLEMS.append(" ".join(PROBLEM_PREFIX) + " " + msg)
+
+
 def Push(msg):
   PROBLEM_PREFIX.append(msg)
+
+
 def Pop():
   PROBLEM_PREFIX.pop()
 
 
 def Banner(msg):
-  print "-" * 70
-  print "  ", msg
-  print "-" * 70
+  print("-" * 70)
+  print("  ", msg)
+  print("-" * 70)
 
 
 def GetCertSubject(cert):
   p = common.Run(["openssl", "x509", "-inform", "DER", "-text"],
                  stdin=subprocess.PIPE,
-                 stdout=subprocess.PIPE)
+                 stdout=subprocess.PIPE,
+                 universal_newlines=False)
   out, err = p.communicate(cert)
   if err and not err.strip():
     return "(error reading cert subject)"
-  for line in out.split("\n"):
+  for line in out.decode().split("\n"):
     line = line.strip()
     if line.startswith("Subject:"):
       return line[8:].strip()
@@ -105,6 +116,7 @@
 
 
 class CertDB(object):
+
   def __init__(self):
     self.certs = {}
 
@@ -132,13 +144,13 @@
           to_load.extend(certs)
 
     for i in to_load:
-      f = open(i)
-      cert = common.ParseCertificate(f.read())
-      f.close()
+      with open(i) as f:
+        cert = common.ParseCertificate(f.read())
       name, _ = os.path.splitext(i)
       name, _ = os.path.splitext(name)
       self.Add(cert, name)
 
+
 ALL_CERTS = CertDB()
 
 
@@ -152,13 +164,14 @@
                     "-outform", "PEM",
                     "-print_certs"],
                    stdin=subprocess.PIPE,
-                   stdout=subprocess.PIPE)
+                   stdout=subprocess.PIPE,
+                   universal_newlines=False)
     out, err = p.communicate(data)
     if err and not err.strip():
-      AddProblem("error reading cert:\n" + err)
+      AddProblem("error reading cert:\n" + err.decode())
       return None
 
-    cert = common.ParseCertificate(out)
+    cert = common.ParseCertificate(out.decode())
     if not cert:
       AddProblem("error parsing cert output")
       return None
@@ -184,22 +197,20 @@
 
   def RecordCerts(self, full_filename):
     out = set()
-    try:
-      f = open(full_filename)
-      apk = zipfile.ZipFile(f, "r")
+    with zipfile.ZipFile(full_filename) as apk:
       pkcs7 = None
       for info in apk.infolist():
-        if info.filename.startswith("META-INF/") and \
-           (info.filename.endswith(".DSA") or info.filename.endswith(".RSA")):
-          pkcs7 = apk.read(info.filename)
-          cert = CertFromPKCS7(pkcs7, info.filename)
+        filename = info.filename
+        if (filename.startswith("META-INF/") and
+            info.filename.endswith((".DSA", ".RSA"))):
+          pkcs7 = apk.read(filename)
+          cert = CertFromPKCS7(pkcs7, filename)
           out.add(cert)
           ALL_CERTS.Add(cert)
       if not pkcs7:
         AddProblem("no signature")
-    finally:
-      f.close()
-      self.certs = frozenset(out)
+
+    self.certs = frozenset(out)
 
   def ReadManifest(self, full_filename):
     p = common.Run(["aapt", "dump", "xmltree", full_filename,
@@ -247,8 +258,8 @@
     # This is the list of wildcards of files we extract from |filename|.
     apk_extensions = ['*.apk', '*.apex']
 
-    self.certmap, compressed_extension = common.ReadApkCerts(
-        zipfile.ZipFile(filename))
+    with zipfile.ZipFile(filename) as input_zip:
+      self.certmap, compressed_extension = common.ReadApkCerts(input_zip)
     if compressed_extension:
       apk_extensions.append('*.apk' + compressed_extension)
 
@@ -287,7 +298,7 @@
     """Look for any instances where packages signed with different
     certs request the same sharedUserId."""
     apks_by_uid = {}
-    for apk in self.apks.itervalues():
+    for apk in self.apks.values():
       if apk.shared_uid:
         apks_by_uid.setdefault(apk.shared_uid, []).append(apk)
 
@@ -302,15 +313,15 @@
 
       AddProblem("different cert sets for packages with uid %s" % (uid,))
 
-      print "uid %s is shared by packages with different cert sets:" % (uid,)
+      print("uid %s is shared by packages with different cert sets:" % (uid,))
       for apk in apks:
-        print "%-*s  [%s]" % (self.max_pkg_len, apk.package, apk.filename)
+        print("%-*s  [%s]" % (self.max_pkg_len, apk.package, apk.filename))
         for cert in apk.certs:
-          print "   ", ALL_CERTS.Get(cert)
-      print
+          print("   ", ALL_CERTS.Get(cert))
+      print()
 
   def CheckExternalSignatures(self):
-    for apk_filename, certname in self.certmap.iteritems():
+    for apk_filename, certname in self.certmap.items():
       if certname == "EXTERNAL":
         # Apps marked EXTERNAL should be signed with the test key
         # during development, then manually re-signed after
@@ -326,25 +337,25 @@
   def PrintCerts(self):
     """Display a table of packages grouped by cert."""
     by_cert = {}
-    for apk in self.apks.itervalues():
+    for apk in self.apks.values():
       for cert in apk.certs:
         by_cert.setdefault(cert, []).append((apk.package, apk))
 
-    order = [(-len(v), k) for (k, v) in by_cert.iteritems()]
+    order = [(-len(v), k) for (k, v) in by_cert.items()]
     order.sort()
 
     for _, cert in order:
-      print "%s:" % (ALL_CERTS.Get(cert),)
+      print("%s:" % (ALL_CERTS.Get(cert),))
       apks = by_cert[cert]
       apks.sort()
       for _, apk in apks:
         if apk.shared_uid:
-          print "  %-*s  %-*s  [%s]" % (self.max_fn_len, apk.filename,
+          print("  %-*s  %-*s  [%s]" % (self.max_fn_len, apk.filename,
                                         self.max_pkg_len, apk.package,
-                                        apk.shared_uid)
+                                        apk.shared_uid))
         else:
-          print "  %-*s  %s" % (self.max_fn_len, apk.filename, apk.package)
-      print
+          print("  %-*s  %s" % (self.max_fn_len, apk.filename, apk.package))
+      print()
 
   def CompareWith(self, other):
     """Look for instances where a given package that exists in both
@@ -365,12 +376,12 @@
             by_certpair.setdefault((other.apks[i].certs,
                                     self.apks[i].certs), []).append(i)
         else:
-          print "%s [%s]: new APK (not in comparison target_files)" % (
-              i, self.apks[i].filename)
+          print("%s [%s]: new APK (not in comparison target_files)" % (
+              i, self.apks[i].filename))
       else:
         if i in other.apks:
-          print "%s [%s]: removed APK (only in comparison target_files)" % (
-              i, other.apks[i].filename)
+          print("%s [%s]: removed APK (only in comparison target_files)" % (
+              i, other.apks[i].filename))
 
     if by_certpair:
       AddProblem("some APKs changed certs")
@@ -378,23 +389,23 @@
       for (old, new), packages in sorted(by_certpair.items()):
         for i, o in enumerate(old):
           if i == 0:
-            print "was", ALL_CERTS.Get(o)
+            print("was", ALL_CERTS.Get(o))
           else:
-            print "   ", ALL_CERTS.Get(o)
+            print("   ", ALL_CERTS.Get(o))
         for i, n in enumerate(new):
           if i == 0:
-            print "now", ALL_CERTS.Get(n)
+            print("now", ALL_CERTS.Get(n))
           else:
-            print "   ", ALL_CERTS.Get(n)
+            print("   ", ALL_CERTS.Get(n))
         for i in sorted(packages):
           old_fn = other.apks[i].filename
           new_fn = self.apks[i].filename
           if old_fn == new_fn:
-            print "  %-*s  [%s]" % (max_pkg_len, i, old_fn)
+            print("  %-*s  [%s]" % (max_pkg_len, i, old_fn))
           else:
-            print "  %-*s  [was: %s; now: %s]" % (max_pkg_len, i,
-                                                  old_fn, new_fn)
-        print
+            print("  %-*s  [was: %s; now: %s]" % (max_pkg_len, i,
+                                                  old_fn, new_fn))
+        print()
 
 
 def main(argv):
@@ -451,9 +462,9 @@
     target_files.CompareWith(compare_files)
 
   if PROBLEMS:
-    print "%d problem(s) found:\n" % (len(PROBLEMS),)
+    print("%d problem(s) found:\n" % (len(PROBLEMS),))
     for p in PROBLEMS:
-      print p
+      print(p)
     return 1
 
   return 0
@@ -464,9 +475,7 @@
     r = main(sys.argv[1:])
     sys.exit(r)
   except common.ExternalError as e:
-    print
-    print "   ERROR: %s" % (e,)
-    print
+    print("\n   ERROR: %s\n" % (e,))
     sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 7cff831..4bebef5 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -14,6 +14,7 @@
 
 from __future__ import print_function
 
+import base64
 import collections
 import copy
 import errno
@@ -30,7 +31,6 @@
 import re
 import shlex
 import shutil
-import string
 import subprocess
 import sys
 import tempfile
@@ -190,6 +190,8 @@
     kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
         stdin, etc. stdout and stderr will default to subprocess.PIPE and
         subprocess.STDOUT respectively unless caller specifies any of them.
+        universal_newlines will default to True, as most of the users in
+        releasetools expect string output.
 
   Returns:
     A subprocess.Popen object.
@@ -197,6 +199,8 @@
   if 'stdout' not in kwargs and 'stderr' not in kwargs:
     kwargs['stdout'] = subprocess.PIPE
     kwargs['stderr'] = subprocess.STDOUT
+  if 'universal_newlines' not in kwargs:
+    kwargs['universal_newlines'] = True
   # Don't log any if caller explicitly says so.
   if verbose != False:
     logger.info("  Running: \"%s\"", " ".join(args))
@@ -314,7 +318,7 @@
 
   def read_helper(fn):
     if isinstance(input_file, zipfile.ZipFile):
-      return input_file.read(fn)
+      return input_file.read(fn).decode()
     else:
       path = os.path.join(input_file, *fn.split("/"))
       try:
@@ -455,6 +459,13 @@
   return LoadDictionaryFromLines(data.split("\n"))
 
 
+def LoadDictionaryFromFile(file_path):
+  with open(file_path) as f:
+    lines = list(f.read().splitlines())
+
+  return LoadDictionaryFromLines(lines)
+
+
 def LoadDictionaryFromLines(lines):
   d = {}
   for line in lines:
@@ -526,7 +537,7 @@
   # system. Other areas assume system is always at "/system" so point /system
   # at /.
   if system_root_image:
-    assert not d.has_key("/system") and d.has_key("/")
+    assert '/system' not in d and '/' in d
     d["/system"] = d["/"]
   return d
 
@@ -953,7 +964,7 @@
     # filename listed in system.map may contain an additional leading slash
     # (i.e. "//system/framework/am.jar"). Using lstrip to get consistent
     # results.
-    arcname = string.replace(entry, which, which.upper(), 1).lstrip('/')
+    arcname = entry.replace(which, which.upper(), 1).lstrip('/')
 
     # Special handling another case, where files not under /system
     # (e.g. "/sbin/charger") are packed under ROOT/ in a target_files.zip.
@@ -1223,7 +1234,7 @@
     if basename:
       installed_files.add(basename)
 
-  for line in tf_zip.read("META/apkcerts.txt").split("\n"):
+  for line in tf_zip.read('META/apkcerts.txt').decode().split('\n'):
     line = line.strip()
     if not line:
       continue
@@ -1433,6 +1444,8 @@
 
       if not first:
         print("key file %s still missing some passwords." % (self.pwfile,))
+        if sys.version_info[0] >= 3:
+          raw_input = input  # pylint: disable=redefined-builtin
         answer = raw_input("try to edit again? [y]> ").strip()
         if answer and answer[0] not in 'yY':
           raise RuntimeError("key passwords unavailable")
@@ -2185,7 +2198,7 @@
   This gives the same result as `openssl x509 -in <filename> -outform DER`.
 
   Returns:
-    The decoded certificate string.
+    The decoded certificate bytes.
   """
   cert_buffer = []
   save = False
@@ -2196,7 +2209,7 @@
       cert_buffer.append(line)
     if "--BEGIN CERTIFICATE--" in line:
       save = True
-  cert = "".join(cert_buffer).decode('base64')
+  cert = base64.b64decode("".join(cert_buffer))
   return cert
 
 
@@ -2338,7 +2351,7 @@
 
   logger.info("putting script in %s", sh_location)
 
-  output_sink(sh_location, sh)
+  output_sink(sh_location, sh.encode())
 
 
 class DynamicPartitionUpdate(object):
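
Note: the Run()/RunAndCheckOutput() change makes text output the default, which is what most releasetools callers want on Python 3; the few callers that pipe binary data (certificates, images) must now opt out with universal_newlines=False and decode themselves, as the check_target_files_signatures.py hunks above do. A minimal sketch of the new default, assuming Run wraps subprocess.Popen:

    import subprocess

    def Run(args, **kwargs):
        if 'stdout' not in kwargs and 'stderr' not in kwargs:
            kwargs['stdout'] = subprocess.PIPE
            kwargs['stderr'] = subprocess.STDOUT
        if 'universal_newlines' not in kwargs:
            kwargs['universal_newlines'] = True  # str output by default
        return subprocess.Popen(args, **kwargs)

    text, _ = Run(['date']).communicate()  # str on Python 2 and 3
    raw, _ = Run(['uname'], stdout=subprocess.PIPE,
                 universal_newlines=False).communicate()  # bytes; caller decodes
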
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index f0ae217..e2b0e3d 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -334,9 +334,9 @@
     in_vendor = any(item.startswith(partition) for item in vendor_item_list)
     if in_framework and in_vendor:
       logger.error(
-          'Cannot extract items from {0} for both the framework and vendor builds. '
-          'Please ensure only one merge config item list includes {0}.'.format(
-              partition))
+          'Cannot extract items from {0} for both the framework and vendor'
+          ' builds. Please ensure only one merge config item list'
+          ' includes {0}.'.format(partition))
       has_error = True
 
   if ('dynamic_partition_list' in framework_misc_info_keys) or (
@@ -500,18 +500,14 @@
       instance.
   """
 
-  def read_helper(d):
-    misc_info_txt = os.path.join(d, 'META', 'misc_info.txt')
-    with open(misc_info_txt) as f:
-      return list(f.read().splitlines())
-
-  framework_dict = common.LoadDictionaryFromLines(
-      read_helper(framework_target_files_temp_dir))
+  misc_info_path = ['META', 'misc_info.txt']
+  framework_dict = common.LoadDictionaryFromFile(
+      os.path.join(framework_target_files_temp_dir, *misc_info_path))
 
   # We take most of the misc info from the vendor target files.
 
-  merged_dict = common.LoadDictionaryFromLines(
-      read_helper(vendor_target_files_temp_dir))
+  merged_dict = common.LoadDictionaryFromFile(
+      os.path.join(vendor_target_files_temp_dir, *misc_info_path))
 
   # Replace certain values in merged_dict with values from
   # framework_dict.
@@ -578,16 +574,12 @@
                    'dynamic_partitions_info.txt')):
     return
 
-  def read_helper(d):
-    dynamic_partitions_info_txt = os.path.join(d, 'META',
-                                               'dynamic_partitions_info.txt')
-    with open(dynamic_partitions_info_txt) as f:
-      return list(f.read().splitlines())
+  dynamic_partitions_info_path = ['META', 'dynamic_partitions_info.txt']
 
-  framework_dynamic_partitions_dict = common.LoadDictionaryFromLines(
-      read_helper(framework_target_files_dir))
-  vendor_dynamic_partitions_dict = common.LoadDictionaryFromLines(
-      read_helper(vendor_target_files_dir))
+  framework_dynamic_partitions_dict = common.LoadDictionaryFromFile(
+      os.path.join(framework_target_files_dir, *dynamic_partitions_info_path))
+  vendor_dynamic_partitions_dict = common.LoadDictionaryFromFile(
+      os.path.join(vendor_target_files_dir, *dynamic_partitions_info_path))
 
   merged_dynamic_partitions_dict = merge_dynamic_partition_info_dicts(
       framework_dict=framework_dynamic_partitions_dict,
@@ -854,12 +846,8 @@
   misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
                                'misc_info.txt')
 
-  def read_helper():
-    with open(misc_info_txt) as f:
-      return list(f.read().splitlines())
-
-  use_dynamic_partitions = common.LoadDictionaryFromLines(
-      read_helper()).get('use_dynamic_partitions')
+  use_dynamic_partitions = common.LoadDictionaryFromFile(misc_info_txt).get(
+      'use_dynamic_partitions')
 
   if use_dynamic_partitions != 'true' and output_super_empty:
     raise ValueError(
@@ -1000,7 +988,8 @@
       OPTIONS.framework_item_list = a
     elif o == '--system-misc-info-keys':
       logger.warning(
-          '--system-misc-info-keys has been renamed to --framework-misc-info-keys'
+          '--system-misc-info-keys has been renamed to '
+          '--framework-misc-info-keys'
       )
       OPTIONS.framework_misc_info_keys = a
     elif o == '--framework-misc-info-keys':
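
Note: these merge_target_files.py hunks replace three copies of a read_helper() closure with the new common.LoadDictionaryFromFile() helper added above. A usage sketch, with a hypothetical extracted target-files directory:

    import os
    import common

    # Hypothetical layout: <dir>/META/misc_info.txt with key=value lines.
    misc_info = common.LoadDictionaryFromFile(
        os.path.join('/tmp/framework_target_files', 'META', 'misc_info.txt'))
    use_dynamic_partitions = misc_info.get('use_dynamic_partitions') == 'true'
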
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index f686ca0..3442b27 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -38,8 +38,8 @@
   -k  (--package_key) <key>
       Key to use to sign the package (default is the value of
       default_system_dev_certificate from the input target-files's
-      META/misc_info.txt, or "build/make/target/product/security/testkey" if that
-      value is not specified).
+      META/misc_info.txt, or "build/make/target/product/security/testkey" if
+      that value is not specified).
 
       For incremental OTAs, the default value is based on the source
       target-file, not the target build.
@@ -371,7 +371,7 @@
       return prop_val
 
     source_order_val = self.info_dict.get("build.prop", {}).get(
-      "ro.product.property_source_order")
+        "ro.product.property_source_order")
     if source_order_val:
       source_order = source_order_val.split(",")
     else:
@@ -381,13 +381,13 @@
     if any([x not in BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER
             for x in source_order]):
       raise common.ExternalError(
-        "Invalid ro.product.property_source_order '{}'".format(source_order))
+          "Invalid ro.product.property_source_order '{}'".format(source_order))
 
     for source in source_order:
-      source_prop = prop.replace("ro.product", "ro.product.{}".format(source),
-                                 1)
-      prop_val = self.info_dict.get("{}.build.prop".format(source), {}).get(
-        source_prop)
+      source_prop = prop.replace(
+          "ro.product", "ro.product.{}".format(source), 1)
+      prop_val = self.info_dict.get(
+          "{}.build.prop".format(source), {}).get(source_prop)
       if prop_val:
         return prop_val
 
@@ -412,14 +412,14 @@
         return self.GetBuildProp("ro.build.fingerprint")
       except common.ExternalError:
         return "{}/{}/{}:{}/{}/{}:{}/{}".format(
-          self.GetBuildProp("ro.product.brand"),
-          self.GetBuildProp("ro.product.name"),
-          self.GetBuildProp("ro.product.device"),
-          self.GetBuildProp("ro.build.version.release"),
-          self.GetBuildProp("ro.build.id"),
-          self.GetBuildProp("ro.build.version.incremental"),
-          self.GetBuildProp("ro.build.type"),
-          self.GetBuildProp("ro.build.tags"))
+            self.GetBuildProp("ro.product.brand"),
+            self.GetBuildProp("ro.product.name"),
+            self.GetBuildProp("ro.product.device"),
+            self.GetBuildProp("ro.build.version.release"),
+            self.GetBuildProp("ro.build.id"),
+            self.GetBuildProp("ro.build.version.incremental"),
+            self.GetBuildProp("ro.build.type"),
+            self.GetBuildProp("ro.build.tags"))
     return "%s/%s/%s:%s" % (
         self.GetOemProperty("ro.product.brand"),
         self.GetOemProperty("ro.product.name"),
@@ -508,7 +508,7 @@
     MODULUS_PREFIX = "Modulus="
     assert modulus_string.startswith(MODULUS_PREFIX)
     modulus_string = modulus_string[len(MODULUS_PREFIX):]
-    key_size = len(modulus_string) / 2
+    key_size = len(modulus_string) // 2
     assert key_size == 256 or key_size == 512, \
         "Unsupported key size {}".format(key_size)
     return key_size
@@ -1051,7 +1051,7 @@
     output: A ZipFile object or a string of the output file path.
   """
 
-  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.iteritems())])
+  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
   if isinstance(output, zipfile.ZipFile):
     common.ZipWriteStr(output, METADATA_NAME, value,
                        compress_type=zipfile.ZIP_STORED)
@@ -1067,7 +1067,7 @@
 
   post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
   pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
-  is_downgrade = long(post_timestamp) < long(pre_timestamp)
+  is_downgrade = int(post_timestamp) < int(pre_timestamp)
 
   if OPTIONS.downgrade:
     if not is_downgrade:
@@ -1392,7 +1392,7 @@
     payload_offset += len(payload_info.extra) + len(payload_info.filename)
     payload_size = payload_info.file_size
 
-    with input_zip.open('payload.bin', 'r') as payload_fp:
+    with input_zip.open('payload.bin') as payload_fp:
       header_bin = payload_fp.read(24)
 
     # network byte order (big-endian)
@@ -1864,7 +1864,6 @@
 
   with zipfile.ZipFile(input_file, 'r') as input_zip:
     infolist = input_zip.infolist()
-    namelist = input_zip.namelist()
 
   input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
   for info in infolist:
@@ -1963,8 +1962,8 @@
     for partition in ab_partitions:
       if (partition in dynamic_partition_list and
           partition not in super_block_devices):
-          logger.info("Dropping %s from ab_partitions.txt", partition)
-          continue
+        logger.info("Dropping %s from ab_partitions.txt", partition)
+        continue
       f.write(partition + "\n")
   to_delete = [AB_PARTITIONS]
 
@@ -1976,7 +1975,7 @@
   to_delete += [DYNAMIC_PARTITION_INFO]
 
   # Remove the existing partition images as well as the map files.
-  to_delete += replace.values()
+  to_delete += list(replace.values())
   to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
 
   common.ZipDelete(target_file, to_delete)
@@ -1986,7 +1985,7 @@
   # Write super_{foo}.img as {foo}.img.
   for src, dst in replace.items():
     assert src in namelist, \
-          'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
+        'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
     unzipped_file = os.path.join(input_tmp, *src.split('/'))
     common.ZipWrite(target_zip, unzipped_file, arcname=dst)
 
@@ -2291,7 +2290,8 @@
   OPTIONS.cache_size = cache_size
 
   if OPTIONS.extra_script is not None:
-    OPTIONS.extra_script = open(OPTIONS.extra_script).read()
+    with open(OPTIONS.extra_script) as fp:
+      OPTIONS.extra_script = fp.read()
 
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index 1778615..affd6a7 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -40,6 +40,20 @@
 logger = logging.getLogger(__name__)
 
 
+def SignApexFile(apex_file, payload_key, container_key, signing_args=None):
+  """Signs the given apex file."""
+  with open(apex_file, 'rb') as input_fp:
+    apex_data = input_fp.read()
+
+  return apex_utils.SignApex(
+      apex_data,
+      payload_key=payload_key,
+      container_key=container_key,
+      container_pw=None,
+      codename_to_api_level_map=None,
+      signing_args=signing_args)
+
+
 def main(argv):
 
   options = {}
@@ -76,20 +90,12 @@
 
   common.InitLogging()
 
-  input_zip = args[0]
-  output_zip = args[1]
-  with open(input_zip) as input_fp:
-    apex_data = input_fp.read()
-
-  signed_apex = apex_utils.SignApex(
-      apex_data,
-      payload_key=options['payload_key'],
-      container_key=options['container_key'],
-      container_pw=None,
-      codename_to_api_level_map=None,
-      signing_args=options.get('payload_extra_args'))
-
-  shutil.copyfile(signed_apex, output_zip)
+  signed_apex = SignApexFile(
+      args[0],
+      options['payload_key'],
+      options['container_key'],
+      options.get('payload_extra_args'))
+  shutil.copyfile(signed_apex, args[1])
   logger.info("done.")
 
 
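With the signing logic factored out, other tools can call SignApexFile directly instead of going through main(). A hedged usage sketch (the paths are placeholders, and common.OPTIONS is assumed to be initialized the way main() does it):

    import shutil

    import sign_apex

    signed = sign_apex.SignApexFile(
        'foo.apex',
        payload_key='testkey_RSA4096.key',
        container_key='testkey')
    shutil.copyfile(signed, 'foo.signed.apex')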
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index c174d2f..668ad9b 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -488,23 +488,33 @@
       continue
 
     # System properties.
-    elif filename in ("SYSTEM/build.prop",
-                      "VENDOR/build.prop",
-                      "SYSTEM/vendor/build.prop",
-                      "ODM/build.prop",  # legacy
-                      "ODM/etc/build.prop",
-                      "VENDOR/odm/build.prop",  # legacy
-                      "VENDOR/odm/etc/build.prop",
-                      "PRODUCT/build.prop",
-                      "SYSTEM/product/build.prop",
-                      "PRODUCT_SERVICES/build.prop",
-                      "SYSTEM/product_services/build.prop",
-                      "SYSTEM/etc/prop.default",
-                      "BOOT/RAMDISK/prop.default",
-                      "BOOT/RAMDISK/default.prop",  # legacy
-                      "ROOT/default.prop",  # legacy
-                      "RECOVERY/RAMDISK/prop.default",
-                      "RECOVERY/RAMDISK/default.prop"):  # legacy
+    elif filename in (
+        "SYSTEM/build.prop",
+
+        "VENDOR/build.prop",
+        "SYSTEM/vendor/build.prop",
+
+        "ODM/etc/build.prop",
+        "VENDOR/odm/etc/build.prop",
+
+        "PRODUCT/build.prop",
+        "SYSTEM/product/build.prop",
+
+        "PRODUCT_SERVICES/build.prop",
+        "SYSTEM/product_services/build.prop",
+
+        "SYSTEM/etc/prop.default",
+        "BOOT/RAMDISK/prop.default",
+        "RECOVERY/RAMDISK/prop.default",
+
+        # ROOT/default.prop is a legacy path, but may still exist for upgrading
+        # devices that don't support `property_overrides_split_enabled`.
+        "ROOT/default.prop",
+
+        # RECOVERY/RAMDISK/default.prop is a legacy path, but it will always
+        # exist as a symlink in the current code, so rewriting it is a no-op.
+        # The path is kept here for clarity.
+        "RECOVERY/RAMDISK/default.prop"):
       print("Rewriting %s:" % (filename,))
       if stat.S_ISLNK(info.external_attr >> 16):
         new_data = data
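
For context on the S_ISLNK check above: zip entries record the Unix mode in the upper 16 bits of external_attr, which is how the code detects that a legacy prop file is really a symlink and leaves its data untouched. A standalone sketch (the zip name is hypothetical):

    import stat
    import zipfile

    with zipfile.ZipFile('target_files.zip') as zf:
      info = zf.getinfo('RECOVERY/RAMDISK/default.prop')
      # Upper 16 bits of external_attr hold the Unix file mode.
      if stat.S_ISLNK(info.external_attr >> 16):
        print('symlink; keeping data as-is')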
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 11ac9f5..914e58e 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -579,7 +579,7 @@
   def test_ExtractPublicKey(self):
     cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
     pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
-    with open(pubkey, 'rb') as pubkey_fp:
+    with open(pubkey) as pubkey_fp:
       self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))
 
   def test_ExtractPublicKey_invalidInput(self):
@@ -590,15 +590,16 @@
   def test_ExtractAvbPublicKey(self):
     privkey = os.path.join(self.testdata_dir, 'testkey.key')
     pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
-    with open(common.ExtractAvbPublicKey(privkey)) as privkey_fp, \
-        open(common.ExtractAvbPublicKey(pubkey)) as pubkey_fp:
+    with open(common.ExtractAvbPublicKey(privkey), 'rb') as privkey_fp, \
+        open(common.ExtractAvbPublicKey(pubkey), 'rb') as pubkey_fp:
       self.assertEqual(privkey_fp.read(), pubkey_fp.read())
 
   def test_ParseCertificate(self):
     cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
 
     cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
-    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                      universal_newlines=False)
     expected, _ = proc.communicate()
     self.assertEqual(0, proc.returncode)
 
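universal_newlines=False is needed here presumably because common.Run enables text mode by default; DER certificate output is binary, and decoding it as text on Python 3 would corrupt or reject it. The distinction in plain subprocess terms (assumes a POSIX echo):

    import subprocess

    # Text mode: stdout is str (decoded, newline-translated).
    out, _ = subprocess.Popen(['echo', 'hi'], stdout=subprocess.PIPE,
                              universal_newlines=True).communicate()
    assert isinstance(out, str)

    # Binary mode (Popen's own default): stdout is raw bytes.
    out, _ = subprocess.Popen(['echo', 'hi'],
                              stdout=subprocess.PIPE).communicate()
    assert isinstance(out, bytes)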
@@ -914,7 +915,7 @@
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
       info_values = ''.join(
-          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.iteritems())])
+          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
       common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)
 
       FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
@@ -1085,7 +1086,7 @@
     loc = os.path.join(self._tempdir, prefix, name)
     if not os.path.exists(os.path.dirname(loc)):
       os.makedirs(os.path.dirname(loc))
-    with open(loc, "w+") as f:
+    with open(loc, "wb") as f:
       f.write(data)
 
   def test_full_recovery(self):
@@ -1110,7 +1111,7 @@
     validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                         self._info)
     # Validate 'recovery-from-boot' with bonus argument.
-    self._out_tmp_sink("etc/recovery-resource.dat", "bonus", "SYSTEM")
+    self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
     common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                              recovery_image, boot_image, self._info)
     validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
@@ -1118,25 +1119,30 @@
 
 
 class MockScriptWriter(object):
-  """A class that mocks edify_generator.EdifyGenerator.
-  """
+  """A class that mocks edify_generator.EdifyGenerator."""
+
   def __init__(self, enable_comments=False):
     self.lines = []
     self.enable_comments = enable_comments
+
   def Comment(self, comment):
     if self.enable_comments:
-      self.lines.append("# {}".format(comment))
+      self.lines.append('# {}'.format(comment))
+
   def AppendExtra(self, extra):
     self.lines.append(extra)
+
   def __str__(self):
-    return "\n".join(self.lines)
+    return '\n'.join(self.lines)
 
 
 class MockBlockDifference(object):
+
   def __init__(self, partition, tgt, src=None):
     self.partition = partition
     self.tgt = tgt
     self.src = src
+
   def WriteScript(self, script, _, progress=None,
                   write_verify_script=False):
     if progress:
@@ -1144,11 +1150,13 @@
     script.AppendExtra("patch({});".format(self.partition))
     if write_verify_script:
       self.WritePostInstallVerifyScript(script)
+
   def WritePostInstallVerifyScript(self, script):
     script.AppendExtra("verify({});".format(self.partition))
 
 
 class FakeSparseImage(object):
+
   def __init__(self, size):
     self.blocksize = 4096
     self.total_blocks = size // 4096
@@ -1156,12 +1164,13 @@
 
 
 class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
+
   @staticmethod
   def get_op_list(output_path):
     with zipfile.ZipFile(output_path) as output_zip:
-      with output_zip.open("dynamic_partitions_op_list") as op_list:
-        return [line.strip() for line in op_list.readlines()
-                if not line.startswith("#")]
+      with output_zip.open('dynamic_partitions_op_list') as op_list:
+        return [line.decode().strip() for line in op_list.readlines()
+                if not line.startswith(b'#')]
 
   def setUp(self):
     self.script = MockScriptWriter()
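
The decode()/b'#' changes reflect that ZipFile.open always returns a binary stream on Python 3, so iteration yields bytes lines. A self-contained check of the same pattern:

    import io
    import zipfile

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as zf:
      zf.writestr('dynamic_partitions_op_list',
                  '# comment\nresize system 1024\n')

    with zipfile.ZipFile(buf) as zf:
      with zf.open('dynamic_partitions_op_list') as op_list:
        ops = [line.decode().strip() for line in op_list.readlines()
               if not line.startswith(b'#')]
    assert ops == ['resize system 1024']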
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
new file mode 100644
index 0000000..4dcc214
--- /dev/null
+++ b/tools/releasetools/test_sign_apex.py
@@ -0,0 +1,41 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os.path
+
+import common
+import sign_apex
+import test_utils
+
+
+class SignApexTest(test_utils.ReleaseToolsTestCase):
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.assertTrue(os.path.exists(self.testdata_dir))
+
+    common.OPTIONS.search_path = test_utils.get_search_path()
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_SignApexFile(self):
+    foo_apex = os.path.join(self.testdata_dir, 'foo.apex')
+    payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+    container_key = os.path.join(self.testdata_dir, 'testkey')
+    signed_foo_apex = sign_apex.SignApexFile(
+        foo_apex,
+        payload_key,
+        container_key)
+    self.assertTrue(os.path.exists(signed_foo_apex))
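
A likely way to run just the new test, from tools/releasetools/ (the SkipIfExternalToolsUnavailable decorator skips it when the external signing tools are absent):

    # python -m unittest test_sign_apex.SignApexTest.test_SignApexFile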
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index 1cc539f..d02bc7f 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -47,25 +47,22 @@
     }
 
     self.hash_algorithm = "sha256"
-    self.fixed_salt = \
-        "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
-    self.expected_root_hash = \
-        "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d"
+    self.fixed_salt = (
+        "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7")
+    self.expected_root_hash = (
+        "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d")
 
-  def _create_simg(self, raw_data):
+  def _CreateSimg(self, raw_data):  # pylint: disable=no-self-use
     output_file = common.MakeTempFile()
     raw_image = common.MakeTempFile()
     with open(raw_image, 'wb') as f:
       f.write(raw_data)
 
     cmd = ["img2simg", raw_image, output_file, '4096']
-    p = common.Run(cmd)
-    p.communicate()
-    self.assertEqual(0, p.returncode)
-
+    common.RunAndCheckOutput(cmd)
     return output_file
 
-  def _generate_image(self):
+  def _GenerateImage(self):
     partition_size = 1024 * 1024
     prop_dict = {
         'partition_size': str(partition_size),
@@ -79,11 +76,11 @@
     self.assertIsNotNone(verity_image_builder)
     adjusted_size = verity_image_builder.CalculateMaxImageSize()
 
-    raw_image = ""
+    raw_image = bytearray(adjusted_size)
     for i in range(adjusted_size):
-      raw_image += str(i % 10)
+      raw_image[i] = ord('0') + i % 10
 
-    output_file = self._create_simg(raw_image)
+    output_file = self._CreateSimg(raw_image)
 
     # Append the verity metadata.
     verity_image_builder.Build(output_file)
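
The bytearray rewrite serves two purposes: it produces bytes (required now that the raw image is written in 'wb' mode) and avoids the quadratic cost of repeated str concatenation. The pattern in isolation:

    size = 32
    raw_image = bytearray(size)        # pre-allocated, zero-filled
    for i in range(size):
      raw_image[i] = ord('0') + i % 10
    assert bytes(raw_image[:12]) == b'012345678901'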
@@ -92,7 +89,7 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_CreateHashtreeInfoGenerator(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
 
     generator = CreateHashtreeInfoGenerator(
         'system', image_file, self.prop_dict)
@@ -103,7 +100,7 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_DecomposeSparseImage(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
 
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
@@ -114,7 +111,7 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_ParseHashtreeMetadata(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
     generator.DecomposeSparseImage(image_file)
@@ -131,12 +128,12 @@
   def test_ValidateHashtree_smoke(self):
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._generate_image())
+    generator.image = sparse_img.SparseImage(self._GenerateImage())
 
     generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
     info.hashtree_range = RangeSet(
-        data=[991232 / 4096, (991232 + 12288) / 4096])
+        data=[991232 // 4096, (991232 + 12288) // 4096])
     info.hash_algorithm = self.hash_algorithm
     info.salt = self.fixed_salt
     info.root_hash = self.expected_root_hash
@@ -147,12 +144,12 @@
   def test_ValidateHashtree_failure(self):
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._generate_image())
+    generator.image = sparse_img.SparseImage(self._GenerateImage())
 
     generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 / 4096])
+    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
     info.hashtree_range = RangeSet(
-        data=[991232 / 4096, (991232 + 12288) / 4096])
+        data=[991232 // 4096, (991232 + 12288) // 4096])
     info.hash_algorithm = self.hash_algorithm
     info.salt = self.fixed_salt
     info.root_hash = "a" + self.expected_root_hash[1:]
@@ -161,12 +158,12 @@
 
   @SkipIfExternalToolsUnavailable()
   def test_Generate(self):
-    image_file = sparse_img.SparseImage(self._generate_image())
+    image_file = sparse_img.SparseImage(self._GenerateImage())
     generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
     info = generator.Generate(image_file)
 
-    self.assertEqual(RangeSet(data=[0, 991232 / 4096]), info.filesystem_range)
-    self.assertEqual(RangeSet(data=[991232 / 4096, (991232 + 12288) / 4096]),
+    self.assertEqual(RangeSet(data=[0, 991232 // 4096]), info.filesystem_range)
+    self.assertEqual(RangeSet(data=[991232 // 4096, (991232 + 12288) // 4096]),
                      info.hashtree_range)
     self.assertEqual(self.hash_algorithm, info.hash_algorithm)
     self.assertEqual(self.fixed_salt, info.salt)
diff --git a/tools/releasetools/testdata/foo.apex b/tools/releasetools/testdata/foo.apex
new file mode 100644
index 0000000..42e0adb
--- /dev/null
+++ b/tools/releasetools/testdata/foo.apex
Binary files differ
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 37d5d27..5d99c99 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -44,7 +44,7 @@
   """Constructs and returns a File object. Rounds up its size if needed."""
 
   assert os.path.exists(unpacked_name)
-  with open(unpacked_name, 'r') as f:
+  with open(unpacked_name, 'rb') as f:
     file_data = f.read()
   file_size = len(file_data)
   if round_up:
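
The docstring's "rounds up its size if needed" is ordinary block alignment. The standard arithmetic, sketched with an assumed 4096-byte block size (the real code takes its block size from context):

    def round_up(size, block_size=4096):
      # Round size up to the next multiple of block_size.
      return (size + block_size - 1) // block_size * block_size

    assert round_up(1) == 4096
    assert round_up(4096) == 4096
    assert round_up(4097) == 8192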
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 3063800..e7f84f5 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -131,7 +131,8 @@
     BuildVerityImageError: On error.
   """
   try:
-    with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
+    with open(target, 'ab') as out_file, \
+        open(file_to_append, 'rb') as input_file:
       for line in input_file:
         out_file.write(line)
   except IOError:
@@ -178,6 +179,8 @@
     # key_path and algorithm are only available when chain partition is used.
     key_path = prop_dict.get("avb_key_path")
     algorithm = prop_dict.get("avb_algorithm")
+
+    # Image uses hash footer.
     if prop_dict.get("avb_hash_enable") == "true":
       return VerifiedBootVersion2VerityImageBuilder(
           prop_dict["partition_name"],
@@ -188,16 +191,17 @@
           algorithm,
           prop_dict.get("avb_salt"),
           prop_dict["avb_add_hash_footer_args"])
-    else:
-      return VerifiedBootVersion2VerityImageBuilder(
-          prop_dict["partition_name"],
-          partition_size,
-          VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
-          prop_dict["avb_avbtool"],
-          key_path,
-          algorithm,
-          prop_dict.get("avb_salt"),
-          prop_dict["avb_add_hashtree_footer_args"])
+
+    # Image uses hashtree footer.
+    return VerifiedBootVersion2VerityImageBuilder(
+        prop_dict["partition_name"],
+        partition_size,
+        VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
+        prop_dict["avb_avbtool"],
+        key_path,
+        algorithm,
+        prop_dict.get("avb_salt"),
+        prop_dict["avb_add_hashtree_footer_args"])
 
   return None
 
@@ -605,19 +609,19 @@
     self.metadata_size = metadata_size
 
     self.hashtree_info.filesystem_range = RangeSet(
-        data=[0, adjusted_size / self.block_size])
+        data=[0, adjusted_size // self.block_size])
     self.hashtree_info.hashtree_range = RangeSet(
-        data=[adjusted_size / self.block_size,
-              (adjusted_size + verity_tree_size) / self.block_size])
+        data=[adjusted_size // self.block_size,
+              (adjusted_size + verity_tree_size) // self.block_size])
 
   def _ParseHashtreeMetadata(self):
     """Parses the hash_algorithm, root_hash, salt from the metadata block."""
 
     metadata_start = self.filesystem_size + self.hashtree_size
     metadata_range = RangeSet(
-        data=[metadata_start / self.block_size,
-              (metadata_start + self.metadata_size) / self.block_size])
-    meta_data = ''.join(self.image.ReadRangeSet(metadata_range))
+        data=[metadata_start // self.block_size,
+              (metadata_start + self.metadata_size) // self.block_size])
+    meta_data = b''.join(self.image.ReadRangeSet(metadata_range))
 
     # More info about the metadata structure available in:
     # system/extras/verity/build_verity_metadata.py
@@ -640,9 +644,9 @@
     assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
             int(table_entries[6]) * self.block_size == self.filesystem_size)
 
-    self.hashtree_info.hash_algorithm = table_entries[7]
-    self.hashtree_info.root_hash = table_entries[8]
-    self.hashtree_info.salt = table_entries[9]
+    self.hashtree_info.hash_algorithm = table_entries[7].decode()
+    self.hashtree_info.root_hash = table_entries[8].decode()
+    self.hashtree_info.salt = table_entries[9].decode()
 
   def ValidateHashtree(self):
     """Checks that we can reconstruct the verity hash tree."""
@@ -669,8 +673,8 @@
 
     # Reads the generated hash tree and checks if it has the exact same bytes
     # as the one in the sparse image.
-    with open(generated_verity_tree, "rb") as fd:
-      return fd.read() == ''.join(self.image.ReadRangeSet(
+    with open(generated_verity_tree, 'rb') as fd:
+      return fd.read() == b''.join(self.image.ReadRangeSet(
           self.hashtree_info.hashtree_range))
 
   def Generate(self, image):
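
b''.join is the bytes counterpart of ''.join; now that ReadRangeSet yields bytes chunks (as these hunks imply), joining with a str separator would raise TypeError on Python 3:

    chunks = [b'\x00\x01', b'\x02\x03']  # stand-in for image.ReadRangeSet(...)
    # ''.join(chunks)                    # TypeError on Python 3
    assert b''.join(chunks) == b'\x00\x01\x02\x03'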
diff --git a/tools/warn.py b/tools/warn.py
index c710164..9389b7d 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -1,5 +1,5 @@
 #!/usr/bin/python
-# This file uses the following encoding: utf-8
+# Prefer python3 but also work with python2.
 
 """Grep warnings messages and output HTML tables or warning counts in CSV.
 
@@ -74,9 +74,11 @@
 #   escape_string, strip_escape_string, emit_warning_arrays
 #   emit_js_data():
 
+from __future__ import print_function
 import argparse
 import cgi
 import csv
+import io
 import multiprocessing
 import os
 import re
@@ -540,7 +542,7 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
-         'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
+         u'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
      'patterns': [r".*: warning: \[LambdaFunctionalInterface\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
@@ -1270,7 +1272,7 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
+         u'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
      'patterns': [r".*: warning: \[ShortCircuitBoolean\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
@@ -1535,7 +1537,7 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java:  Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
+         u'Java:  Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
      'patterns': [r".*: warning: \[ComparableType\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1790,7 +1792,7 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
+         u'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
      'patterns': [r".*: warning: \[IterablePathParameter\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -2922,17 +2924,17 @@
 
 
 def dump_html_prologue(title):
-  print '<html>\n<head>'
-  print '<title>' + title + '</title>'
-  print html_head_scripts
+  print('<html>\n<head>')
+  print('<title>' + title + '</title>')
+  print(html_head_scripts)
   emit_stats_by_project()
-  print '</head>\n<body>'
-  print html_big(title)
-  print '<p>'
+  print('</head>\n<body>')
+  print(html_big(title))
+  print('<p>')
 
 
 def dump_html_epilogue():
-  print '</body>\n</head>\n</html>'
+  print('</body>\n</head>\n</html>')
 
 
 def sort_warnings():
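
Every print-statement conversion in this file leans on the __future__ import added near the top: with it, print is a function on Python 2 too, so a single syntax runs under both interpreters:

    from __future__ import print_function  # no-op on Python 3

    # Without the import, Python 2 would print the tuple ('a', 'b');
    # with it, both versions print: a b
    print('a', 'b')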
@@ -2943,6 +2945,7 @@
 def emit_stats_by_project():
   """Dump a google chart table of warnings per project and severity."""
   # warnings[p][s] is number of warnings in project p of severity s.
+  # pylint:disable=g-complex-comprehension
   warnings = {p: {s: 0 for s in Severity.range} for p in project_names}
   for i in warn_patterns:
     s = i['severity']
@@ -2988,11 +2991,11 @@
       total_all_severities += total_by_severity[s]
   one_row.append(total_all_projects)
   stats_rows.append(one_row)
-  print '<script>'
+  print('<script>')
   emit_const_string_array('StatsHeader', stats_header)
   emit_const_object_array('StatsRows', stats_rows)
-  print draw_table_javascript
-  print '</script>'
+  print(draw_table_javascript)
+  print('</script>')
 
 
 def dump_stats():
@@ -3008,14 +3011,14 @@
       skipped += len(i['members'])
     else:
       known += len(i['members'])
-  print 'Number of classified warnings: <b>' + str(known) + '</b><br>'
-  print 'Number of skipped warnings: <b>' + str(skipped) + '</b><br>'
-  print 'Number of unclassified warnings: <b>' + str(unknown) + '</b><br>'
+  print('Number of classified warnings: <b>' + str(known) + '</b><br>')
+  print('Number of skipped warnings: <b>' + str(skipped) + '</b><br>')
+  print('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
   total = unknown + known + skipped
   extra_msg = ''
   if total < 1000:
     extra_msg = ' (low count may indicate incremental build)'
-  print 'Total number of warnings: <b>' + str(total) + '</b>' + extra_msg
+  print('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
 
 
 # New base table of warnings, [severity, warn_id, project, warning_message]
@@ -3029,14 +3032,14 @@
 #     id for each warning pattern
 #     sort by project, severity, warn_id, warning_message
 def emit_buttons():
-  print ('<button class="button" onclick="expandCollapse(1);">'
-         'Expand all warnings</button>\n'
-         '<button class="button" onclick="expandCollapse(0);">'
-         'Collapse all warnings</button>\n'
-         '<button class="button" onclick="groupBySeverity();">'
-         'Group warnings by severity</button>\n'
-         '<button class="button" onclick="groupByProject();">'
-         'Group warnings by project</button><br>')
+  print('<button class="button" onclick="expandCollapse(1);">'
+        'Expand all warnings</button>\n'
+        '<button class="button" onclick="expandCollapse(0);">'
+        'Collapse all warnings</button>\n'
+        '<button class="button" onclick="groupBySeverity();">'
+        'Group warnings by severity</button>\n'
+        '<button class="button" onclick="groupByProject();">'
+        'Group warnings by project</button><br>')
 
 
 def all_patterns(category):
@@ -3051,14 +3054,14 @@
   """Show which warnings no longer occur."""
   anchor = 'fixed_warnings'
   mark = anchor + '_mark'
-  print ('\n<br><p style="background-color:lightblue"><b>'
-         '<button id="' + mark + '" '
-         'class="bt" onclick="expand(\'' + anchor + '\');">'
-         '&#x2295</button> Fixed warnings. '
-         'No more occurrences. Please consider turning these into '
-         'errors if possible, before they are reintroduced in to the build'
-         ':</b></p>')
-  print '<blockquote>'
+  print('\n<br><p style="background-color:lightblue"><b>'
+        '<button id="' + mark + '" '
+        'class="bt" onclick="expand(\'' + anchor + '\');">'
+        '&#x2295</button> Fixed warnings. '
+        'No more occurrences. Please consider turning these into '
+        'errors if possible, before they are reintroduced into the build'
+        ':</b></p>')
+  print('<blockquote>')
   fixed_patterns = []
   for i in warn_patterns:
     if not i['members']:
@@ -3066,16 +3069,16 @@
                             all_patterns(i) + ')')
     if i['option']:
       fixed_patterns.append(' ' + i['option'])
-  fixed_patterns.sort()
-  print '<div id="' + anchor + '" style="display:none;"><table>'
+  fixed_patterns = sorted(fixed_patterns)
+  print('<div id="' + anchor + '" style="display:none;"><table>')
   cur_row_class = 0
   for text in fixed_patterns:
     cur_row_class = 1 - cur_row_class
     # remove last '\n'
     t = text[:-1] if text[-1] == '\n' else text
-    print '<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>'
-  print '</table></div>'
-  print '</blockquote>'
+    print('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
+  print('</table></div>')
+  print('</blockquote>')
 
 
 def find_project_index(line):
@@ -3187,8 +3190,9 @@
 def normalize_warning_line(line):
   """Normalize file path relative to android_root in a warning line."""
   # replace fancy quotes with plain ol' quotes
-  line = line.replace('‘', "'")
-  line = line.replace('’', "'")
+  line = re.sub(u'[\u2018\u2019]', '\'', line)
+  # replace non-ASCII chars with spaces
+  line = re.sub(u'[^\x00-\x7f]', ' ', line)
   line = line.strip()
   first_column = line.find(':')
   if first_column > 0:
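
The two re.sub calls force a warning line down to plain ASCII before the path and column are parsed: curly quotes become straight quotes, and every other character outside \x00-\x7f becomes a space. Verified in isolation:

    import re

    def normalize(line):
      line = re.sub(u'[\u2018\u2019]', "'", line)  # fancy quotes -> '
      line = re.sub(u'[^\x00-\x7f]', ' ', line)    # other non-ASCII -> space
      return line.strip()

    assert normalize(u'\u2018foo\u2019 caf\xe9') == u"'foo' caf"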
@@ -3246,21 +3250,22 @@
 
 
 def emit_warning_array(name):
-  print 'var warning_{} = ['.format(name)
+  print('var warning_{} = ['.format(name))
   for i in range(len(warn_patterns)):
-    print '{},'.format(warn_patterns[i][name])
-  print '];'
+    print('{},'.format(warn_patterns[i][name]))
+  print('];')
 
 
 def emit_warning_arrays():
   emit_warning_array('severity')
-  print 'var warning_description = ['
+  print('var warning_description = [')
   for i in range(len(warn_patterns)):
     if warn_patterns[i]['members']:
-      print '"{}",'.format(escape_string(warn_patterns[i]['description']))
+      print('"{}",'.format(escape_string(warn_patterns[i]['description'])))
     else:
-      print '"",'  # no such warning
-  print '];'
+      print('"",')  # no such warning
+  print('];')
+
 
 scripts_for_warning_groups = """
   function compareMessages(x1, x2) { // of the same warning type
@@ -3393,39 +3398,42 @@
 
 # Emit a JavaScript const string
 def emit_const_string(name, value):
-  print 'const ' + name + ' = "' + escape_string(value) + '";'
+  print('const ' + name + ' = "' + escape_string(value) + '";')
 
 
 # Emit a JavaScript const integer array.
 def emit_const_int_array(name, array):
-  print 'const ' + name + ' = ['
+  print('const ' + name + ' = [')
   for n in array:
-    print str(n) + ','
-  print '];'
+    print(str(n) + ',')
+  print('];')
 
 
 # Emit a JavaScript const string array.
 def emit_const_string_array(name, array):
-  print 'const ' + name + ' = ['
+  print('const ' + name + ' = [')
   for s in array:
-    print '"' + strip_escape_string(s) + '",'
-  print '];'
+    print('"' + strip_escape_string(s) + '",')
+  print('];')
 
 
 # Emit a JavaScript const string array for HTML.
 def emit_const_html_string_array(name, array):
-  print 'const ' + name + ' = ['
+  print('const ' + name + ' = [')
   for s in array:
-    print '"' + cgi.escape(strip_escape_string(s)) + '",'
-  print '];'
+    # Not using html.escape yet, so this works on both python 2 and 3,
+    # until all users switch to python 3.
+    # pylint:disable=deprecated-method
+    print('"' + cgi.escape(strip_escape_string(s)) + '",')
+  print('];')
 
 
 # Emit a JavaScript const object array.
 def emit_const_object_array(name, array):
-  print 'const ' + name + ' = ['
+  print('const ' + name + ' = [')
   for x in array:
-    print str(x) + ','
-  print '];'
+    print(str(x) + ',')
+  print('];')
 
 
 def emit_js_data():
@@ -3471,18 +3479,18 @@
   dump_html_prologue('Warnings for ' + platform_version + ' - ' +
                      target_product + ' - ' + target_variant)
   dump_stats()
-  print '<br><div id="stats_table"></div><br>'
-  print '\n<script>'
+  print('<br><div id="stats_table"></div><br>')
+  print('\n<script>')
   emit_js_data()
-  print scripts_for_warning_groups
-  print '</script>'
+  print(scripts_for_warning_groups)
+  print('</script>')
   emit_buttons()
   # Warning messages are grouped by severities or project names.
-  print '<br><div id="warning_groups"></div>'
+  print('<br><div id="warning_groups"></div>')
   if args.byproject:
-    print '<script>groupByProject();</script>'
+    print('<script>groupByProject();</script>')
   else:
-    print '<script>groupBySeverity();</script>'
+    print('<script>groupBySeverity();</script>')
   dump_fixed()
   dump_html_epilogue()
 
@@ -3506,8 +3514,7 @@
       warning = kind + ': ' + description_for_csv(i)
       writer.writerow([n, '', warning])
       # print number of warnings for each project, ordered by project name.
-      projects = i['projects'].keys()
-      projects.sort()
+      projects = sorted(i['projects'].keys())
       for p in projects:
         writer.writerow([i['projects'][p], p, warning])
   writer.writerow([total, '', kind + ' warnings'])
@@ -3526,7 +3533,9 @@
 
 
 def main():
-  warning_lines = parse_input_file(open(args.buildlog, 'r'))
+  # We must use the 'utf-8' codec to parse non-ASCII characters in warnings.
+  warning_lines = parse_input_file(
+      io.open(args.buildlog, mode='r', encoding='utf-8'))
   parallel_classify_warnings(warning_lines)
   # If a user passes a csv path, save the file output to the path
   # If the user also passed gencsv, write the output to stdout
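
io.open is the portable spelling: on Python 3 it is literally the builtin open, while on Python 2 it supplies the encoding-aware text layer that the bare open lacks. A self-contained round trip:

    import io

    with io.open('build.log', mode='w', encoding='utf-8') as fp:
      fp.write(u'warning: caf\xe9\n')

    with io.open('build.log', mode='r', encoding='utf-8') as fp:
      assert u'caf\xe9' in fp.read()   # decoded text, not raw bytes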