Merge "Fix comment indicator on endif"
diff --git a/Changes.md b/Changes.md
index 3e48bad..37bbad0 100644
--- a/Changes.md
+++ b/Changes.md
@@ -92,6 +92,11 @@
 attribute to the root element `<manifest>`. If `PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE`
 is 26 or 27, you can add `"target-level"="1"` to your device manifest instead.
 
+### Stop using USE_CLANG_PLATFORM_BUILD {#USE_CLANG_PLATFORM_BUILD}
+
+Clang is the default and only supported Android compiler, so there is no reason
+for this option to exist.
+
 ### Other envsetup.sh variables  {#other_envsetup_variables}
 
 * ANDROID_TOOLCHAIN
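Illustrative sketch (not part of this patch): a legacy board or build environment that still sets the variable will now hit the obsolete-variable error added to core/config.mk below, and the only fix is to delete the assignment.

    # Hypothetical legacy BoardConfig.mk / buildspec.mk setting that is now rejected:
    # USE_CLANG_PLATFORM_BUILD := false
    # Remove the line entirely; clang is always used.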
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 7e23ee0..5ab64b3 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -456,6 +456,13 @@
 $(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/with-local/)
 $(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/no-local/)
 
+# Remove legacy VINTF metadata files
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/compatibility_matrix.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/compatibility_matrix.xml)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/OWNERS b/OWNERS
index 89b446a..7a59f70 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,2 +1,3 @@
 ccross@android.com
 dwillemsen@google.com
+nanzhang@google.com
diff --git a/core/Makefile b/core/Makefile
index a3fbe33..fbd71b3 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -423,6 +423,32 @@
 	$(hide) build/make/tools/post_process_props.py $@
 endif  # property_overrides_split_enabled
 
+# -----------------------------------------------------------------
+# product build.prop
+INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/build.prop
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
+
+FINAL_PRODUCT_PROPERTIES += \
+    $(call collapse-pairs, $(PRODUCT_PRODUCT_PROPERTIES))
+FINAL_PRODUCT_PROPERTIES := $(call uniq-pairs-by-first-component, \
+    $(FINAL_PRODUCT_PROPERTIES),=)
+
+$(INSTALLED_PRODUCT_BUILD_PROP_TARGET):
+	@echo Target product buildinfo: $@
+	@mkdir -p $(dir $@)
+	$(hide) echo > $@
+ifdef BOARD_USES_PRODUCTIMAGE
+	$(hide) echo ro.product.build.date=`$(DATE_FROM_FILE)`>>$@
+	$(hide) echo ro.product.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
+	$(hide) echo ro.product.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
+endif  # BOARD_USES_PRODUCTIMAGE
+	$(hide) echo "#" >> $@; \
+	        echo "# ADDITIONAL PRODUCT PROPERTIES" >> $@; \
+	        echo "#" >> $@;
+	$(hide) $(foreach line,$(FINAL_PRODUCT_PROPERTIES), \
+		echo "$(line)" >> $@;)
+	$(hide) build/make/tools/post_process_props.py $@
+
 # ----------------------------------------------------------------
 
 # -----------------------------------------------------------------
@@ -1013,7 +1039,7 @@
 ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
   INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
 endif
-ifneq ($(filter $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
+ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
 INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
 endif
 
@@ -1044,6 +1070,7 @@
 $(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
+$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "system_squashfs_compressor_opt=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
@@ -1065,6 +1092,15 @@
 $(if $(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "vendor_squashfs_block_size=$(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
 $(if $(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_squashfs_disable_4k_align=$(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "product_fs_type=$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT),$(hide) echo "product_extfs_inode_count=$(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_PARTITION_SIZE),$(hide) echo "product_size=$(BOARD_PRODUCTIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_JOURNAL_SIZE),$(hide) echo "product_journal_size=$(BOARD_PRODUCTIMAGE_JOURNAL_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "product_squashfs_compressor=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "product_squashfs_compressor_opt=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "product_squashfs_block_size=$(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "product_squashfs_disable_4k_align=$(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),$(hide) echo "product_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH)" >> $(1))
 $(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
 $(if $(BOARD_OEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "oem_extfs_inode_count=$(BOARD_OEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
@@ -1079,6 +1115,7 @@
 $(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION),$(hide) echo "product_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
@@ -1099,6 +1136,13 @@
         $(hide) echo "avb_vendor_key_path=$(BOARD_AVB_VENDOR_KEY_PATH)" >> $(1)
         $(hide) echo "avb_vendor_algorithm=$(BOARD_AVB_VENDOR_ALGORITHM)" >> $(1)
         $(hide) echo "avb_vendor_rollback_index_location=$(BOARD_AVB_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_product_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_product_add_hashtree_footer_args=$(BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+    $(if $(BOARD_AVB_PRODUCT_KEY_PATH),\
+        $(hide) echo "avb_product_key_path=$(BOARD_AVB_PRODUCT_KEY_PATH)" >> $(1)
+        $(hide) echo "avb_product_algorithm=$(BOARD_AVB_PRODUCT_ALGORITHM)" >> $(1)
+        $(hide) echo "avb_product_rollback_index_location=$(BOARD_AVB_PRODUCT_ROLLBACK_INDEX_LOCATION)" >> $(1)))
 $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(hide) echo "recovery_as_boot=true" >> $(1))
 $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
@@ -1150,6 +1194,9 @@
 ifdef property_overrides_split_enabled
 recovery_build_props += $(INSTALLED_VENDOR_BUILD_PROP_TARGET)
 endif
+ifdef BOARD_USES_PRODUCTIMAGE
+recovery_build_props += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
+endif
 recovery_resources_common := $(call include-path-for, recovery)/res
 
 # Set recovery_density to the density bucket of the device.
@@ -1442,10 +1489,26 @@
 endef
 endif
 
+# Create symlink /system/product to /product if necessary.
+ifdef BOARD_USES_PRODUCTIMAGE
+define create-system-product-symlink
+$(hide) if [ -d $(TARGET_OUT)/product ] && [ ! -h $(TARGET_OUT)/product ]; then \
+  echo 'Non-symlink $(TARGET_OUT)/product detected!' 1>&2; \
+  echo 'You cannot install files to $(TARGET_OUT)/product while building a separate product.img!' 1>&2; \
+  exit 1; \
+fi
+$(hide) ln -sf /product $(TARGET_OUT)/product
+endef
+else
+define create-system-product-symlink
+endef
+endif
+
 # $(1): output file
 define build-systemimage-target
   @echo "Target system fs image: $(1)"
   $(call create-system-vendor-symlink)
+  $(call create-system-product-symlink)
   @mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
   $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, \
       skip_fsck=true)
@@ -1520,6 +1583,7 @@
 define build-systemtarball-target
   $(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
   $(call create-system-vendor-symlink)
+  $(call create-system-product-symlink)
   $(MKTARBALL) $(FS_GET_STATS) \
     $(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
     $(INSTALLED_SYSTEMTARBALL_TARGET) $(TARGET_OUT)
@@ -1600,6 +1664,10 @@
 	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
 		$(TARGET_COPY_OUT_VENDOR)
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
+		$(TARGET_COPY_OUT_PRODUCT)
+endif
 ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
 	$(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@) $(PDK_PLATFORM_JAVA_ZIP_CONTENTS)
 endif
@@ -1908,6 +1976,55 @@
 endif
 
 # -----------------------------------------------------------------
+# product partition image
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+INTERNAL_PRODUCTIMAGE_FILES := \
+    $(filter $(TARGET_OUT_PRODUCT)/%,\
+      $(ALL_DEFAULT_INSTALLED_MODULES)\
+      $(ALL_PDK_FUSION_FILES))
+
+# platform.zip depends on $(INTERNAL_PRODUCTIMAGE_FILES).
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_PRODUCTIMAGE_FILES)
+
+INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
+$(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST)
+	@echo Installed file list: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
+	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+productimage_intermediates := \
+    $(call intermediates-dir-for,PACKAGING,product)
+BUILT_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
+define build-productimage-target
+  $(call pretty,"Target product fs image: $(INSTALLED_PRODUCTIMAGE_TARGET)")
+  @mkdir -p $(TARGET_OUT_PRODUCT)
+  @mkdir -p $(productimage_intermediates) && rm -rf $(productimage_intermediates)/product_image_info.txt
+  $(call generate-userimage-prop-dictionary, $(productimage_intermediates)/product_image_info.txt, skip_fsck=true)
+  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+      ./build/tools/releasetools/build_image.py \
+      $(TARGET_OUT_PRODUCT) $(productimage_intermediates)/product_image_info.txt $(INSTALLED_PRODUCTIMAGE_TARGET) $(TARGET_OUT)
+  $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCTIMAGE_TARGET),$(BOARD_PRODUCTIMAGE_PARTITION_SIZE))
+endef
+
+# We just build this directly to the install location.
+INSTALLED_PRODUCTIMAGE_TARGET := $(BUILT_PRODUCTIMAGE_TARGET)
+$(INSTALLED_PRODUCTIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_PRODUCTIMAGE_FILES) $(INSTALLED_FILES_FILE_PRODUCT) $(BUILD_IMAGE_SRCS)
+	$(build-productimage-target)
+
+.PHONY: productimage-nodeps pnod
+productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS)
+	$(build-productimage-target)
+
+sync: $(INTERNAL_PRODUCTIMAGE_FILES)
+
+else ifdef BOARD_PREBUILT_PRODUCTIMAGE
+INSTALLED_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
+$(eval $(call copy-one-file,$(BOARD_PREBUILT_PRODUCTIMAGE),$(INSTALLED_PRODUCTIMAGE_TARGET)))
+endif
+
+# -----------------------------------------------------------------
 # dtbo image
 ifdef BOARD_PREBUILT_DTBOIMAGE
 INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
@@ -1961,6 +2078,7 @@
 SYSTEM_FOOTER_ARGS := BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS
 VENDOR_FOOTER_ARGS := BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS
 RECOVERY_FOOTER_ARGS := BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS
+PRODUCT_FOOTER_ARGS := BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS
 
 # Check and set required build variables for a chain partition.
 # $(1): the partition to enable AVB chain, e.g., BOOT or SYSTEM.
@@ -2013,6 +2131,15 @@
 endif
 endif
 
+ifdef INSTALLED_PRODUCTIMAGE_TARGET
+ifdef BOARD_AVB_PRODUCT_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,PRODUCT))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_PRODUCTIMAGE_TARGET)
+endif
+endif
+
 ifdef INSTALLED_DTBOIMAGE_TARGET
 ifdef BOARD_AVB_DTBO_KEY_PATH
 $(eval $(call check-and-set-avb-chain-args,DTBO))
@@ -2066,6 +2193,9 @@
   $(if $(BOARD_AVB_VENDOR_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VENDOR_KEY_PATH) \
       --output $(1)/vendor.avbpubkey)
+  $(if $(BOARD_AVB_PRODUCT_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_PRODUCT_KEY_PATH) \
+      --output $(1)/product.avbpubkey)
   $(if $(BOARD_AVB_DTBO_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_DTBO_KEY_PATH) \
       --output $(1)/dtbo.avbpubkey)
@@ -2092,6 +2222,7 @@
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_SYSTEMIMAGE) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(INSTALLED_DTBOIMAGE_TARGET) \
 		$(INSTALLED_RECOVERYIMAGE_TARGET) \
 		$(BOARD_AVB_KEY_PATH)
@@ -2355,6 +2486,7 @@
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_CACHEIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(INSTALLED_VBMETAIMAGE_TARGET) \
 		$(INSTALLED_DTBOIMAGE_TARGET) \
 		$(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
@@ -2363,6 +2495,7 @@
 		$(INSTALLED_2NDBOOTLOADER_TARGET) \
 		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
 		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
+		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
 		$(SELINUX_FC) \
 		$(APKCERTS_FILE) \
 		$(SOONG_ZIP) \
@@ -2370,11 +2503,12 @@
 		$(HOST_OUT_EXECUTABLES)/imgdiff \
 		$(HOST_OUT_EXECUTABLES)/bsdiff \
 		$(BUILD_IMAGE_SRCS) \
-		$(INSTALLED_VENDOR_MANIFEST) \
-		$(INSTALLED_VENDOR_MATRIX) \
+		$(BUILT_VENDOR_MANIFEST) \
+		$(BUILT_VENDOR_MATRIX) \
 		| $(ACP)
 	@echo "Package target files: $@"
 	$(call create-system-vendor-symlink)
+	$(call create-system-product-symlink)
 	$(hide) rm -rf $@ $@.list $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
 ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
@@ -2440,6 +2574,11 @@
 	$(hide) $(call package_files-copy-root, \
 		$(TARGET_OUT_VENDOR),$(zip_root)/VENDOR)
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	@# Contents of the product image
+	$(hide) $(call package_files-copy-root, \
+		$(TARGET_OUT_PRODUCT),$(zip_root)/PRODUCT)
+endif
 ifdef INSTALLED_SYSTEMOTHERIMAGE_TARGET
 	@# Contents of the system_other image
 	$(hide) $(call package_files-copy-root, \
@@ -2504,6 +2643,10 @@
 	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
 	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH))
 endif
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),)
+	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
+	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH))
+endif
 ifneq ($(strip $(SANITIZE_TARGET)),)
 	# We need to create userdata.img with real data because the instrumented libraries are in userdata.img.
 	$(hide) echo "userdata_img_with_data=true" >> $(zip_root)/META/misc_info.txt
@@ -2587,6 +2730,10 @@
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_PRODUCTIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 ifdef BOARD_PREBUILT_BOOTIMAGE
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_BOOTIMAGE_TARGET) $(zip_root)/IMAGES/
@@ -2617,6 +2764,9 @@
 ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
 	$(hide) $(call fs_config,$(zip_root)/VENDOR,vendor/) > $(zip_root)/META/vendor_filesystem_config.txt
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	$(hide) $(call fs_config,$(zip_root)/PRODUCT,product/) > $(zip_root)/META/product_filesystem_config.txt
+endif
 ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
 	$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
 endif
@@ -2734,6 +2884,7 @@
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(updater_dep)
 endif
 $(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
@@ -2756,7 +2907,8 @@
 $(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE) \
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
-		$(INSTALLED_VENDORIMAGE_TARGET)
+		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET)
 endif
 $(COVERAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,coverage)/filelist
 $(COVERAGE_ZIP): $(SOONG_ZIP)
@@ -2856,6 +3008,15 @@
 vendorimage: $(INSTALLED_QEMU_VENDORIMAGE)
 droidcore: $(INSTALLED_QEMU_VENDORIMAGE)
 endif
+ifeq ($(BOARD_USES_PRODUCTIMAGE),true)
+INSTALLED_QEMU_PRODUCTIMAGE := $(PRODUCT_OUT)/product-qemu.img
+$(INSTALLED_QEMU_PRODUCTIMAGE): $(INSTALLED_PRODUCTIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
+	@echo Create product-qemu.img
+	(export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) ${PRODUCT_OUT}/product.img)
+
+productimage: $(INSTALLED_QEMU_PRODUCTIMAGE)
+droidcore: $(INSTALLED_QEMU_PRODUCTIMAGE)
+endif
 endif
 # -----------------------------------------------------------------
 # The emulator package
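A minimal sketch, not part of the patch, of the BoardConfig.mk values the new product AVB handling above consumes; the key path, algorithm, and rollback index are placeholders. When BOARD_AVB_PRODUCT_KEY_PATH is set the product image becomes a chained partition, otherwise its descriptors are folded directly into vbmeta.img.

    BOARD_AVB_ENABLE := true
    # Optional: sign product.img with its own key (placeholder values).
    BOARD_AVB_PRODUCT_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
    BOARD_AVB_PRODUCT_ALGORITHM := SHA256_RSA2048
    BOARD_AVB_PRODUCT_ROLLBACK_INDEX_LOCATION := 2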
diff --git a/core/aux_config.mk b/core/aux_config.mk
index c40b8cc..41c14ae 100644
--- a/core/aux_config.mk
+++ b/core/aux_config.mk
@@ -32,7 +32,7 @@
 
 # setup AUX globals
 AUX_SHLIB_SUFFIX := .so
-AUX_GLOBAL_ARFLAGS := crsPD
+AUX_GLOBAL_ARFLAGS := cqsD
 AUX_STATIC_LIB_SUFFIX := .a
 
 # Load ever-lasting "indexed" version of AUX variant environment; it is treated as READ-ONLY from this
diff --git a/core/base_rules.mk b/core/base_rules.mk
index cebf52b..b4d8888 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -72,6 +72,8 @@
 LOCAL_OEM_MODULE := true
 else ifneq ($(filter $(TARGET_OUT_ODM)/%,$(_path)),)
 LOCAL_ODM_MODULE := true
+else ifneq ($(filter $(TARGET_OUT_PRODUCT)/%,$(_path)),)
+LOCAL_PRODUCT_MODULE := true
 endif
 _path :=
 
@@ -86,7 +88,7 @@
 endif
 
 include $(BUILD_SYSTEM)/local_vndk.mk
-include $(BUILD_SYSTEM)/local_vsdk.mk
+include $(BUILD_SYSTEM)/local_systemsdk.mk
 
 my_module_tags := $(LOCAL_MODULE_TAGS)
 ifeq ($(my_host_cross),true)
@@ -200,6 +202,8 @@
   partition_tag := _OEM
 else ifeq (true,$(LOCAL_ODM_MODULE))
   partition_tag := _ODM
+else ifeq (true,$(LOCAL_PRODUCT_MODULE))
+  partition_tag := _PRODUCT
 else ifeq (NATIVE_TESTS,$(LOCAL_MODULE_CLASS))
   partition_tag := _DATA
 else
@@ -541,14 +545,23 @@
     ifeq (true, $(LOCAL_IS_HOST_MODULE))
       is_instrumentation_test := false
     endif
+    # If LOCAL_MODULE_CLASS is not APPS, it's certainly not an instrumentation
+    # test. However, some packages for test data also have LOCAL_MODULE_CLASS
+    # set to APPS. Such packages must set LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG
+    # to disable auto-generating a test config file.
+    ifneq (APPS, $(LOCAL_MODULE_CLASS))
+      is_instrumentation_test := false
+    endif
   endif
   # CTS modules can be used for test data, so test config files must be
   # explicitly created using AndroidTest.xml
   ifeq (,$(filter cts, $(LOCAL_COMPATIBILITY_SUITE)))
-    ifeq (true, $(filter true,$(is_native) $(is_instrumentation_test)))
-      include $(BUILD_SYSTEM)/autogen_test_config.mk
-      test_config := $(autogen_test_config_file)
-      autogen_test_config_file :=
+    ifneq (true, $(LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG))
+      ifeq (true, $(filter true,$(is_native) $(is_instrumentation_test)))
+        include $(BUILD_SYSTEM)/autogen_test_config.mk
+        test_config := $(autogen_test_config_file)
+        autogen_test_config_file :=
+      endif
     endif
   endif
 endif
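A hypothetical Android.mk sketch of the new LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG flag: a test-data package built with LOCAL_MODULE_CLASS APPS that opts out of the auto-generated test config (the package name and paths are placeholders).

    LOCAL_PATH := $(call my-dir)
    include $(CLEAR_VARS)
    LOCAL_PACKAGE_NAME := SomeTestDataApp
    LOCAL_MODULE_TAGS := tests
    LOCAL_SRC_FILES := $(call all-java-files-under, src)
    LOCAL_SDK_VERSION := current
    # This APK is only test data, so do not auto-generate an AndroidTest.xml for it.
    LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG := true
    include $(BUILD_PACKAGE)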
diff --git a/core/binary.mk b/core/binary.mk
index a4fd8e3..e3da7d2 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -354,11 +354,6 @@
             my_clang := true
         endif
     endif
-# Add option to make gcc the default for device build
-else ifeq ($(USE_CLANG_PLATFORM_BUILD),false)
-    ifeq ($(my_clang),)
-        my_clang := false
-    endif
 else ifeq ($(my_clang),)
     my_clang := true
 endif
diff --git a/core/build-system.html b/core/build-system.html
index c7938cc..3d86e24 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -762,6 +762,19 @@
 Dialer, Contacts, etc.  This will probably change or go away when we switch
 to an ant-based build system for the apps.</p>
 
+<h4>LOCAL_PATCH_MODULE (experimental option)</h4>
+<p>As of January 2018, you almost certainly don't need this option, so please
+ask and only use it if you understand what you're doing. This feature is
+experimental and may go away in the future.</p>
+<p>
+When compiling language level 9+ .java code in packages that are part of
+a system module, <code>LOCAL_PATCH_MODULE</code> names the module that your
+sources and dependencies should be patched into. The Android runtime currently
+(Jan 2018) doesn't implement the JEP 261 module system, so this option is only
+supported at compile time. It should only be needed to compile tests in packages
+that exist in libcore and are inconvenient to move elsewhere.
+</p>
+
 <h4>LOCAL_PATH</h4>
 <p>The directory your Android.mk file is in. You can set it by putting the
 following as the first line in your Android.mk:</p>
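A hedged sketch, not taken from the patch, of how a libcore test library might use LOCAL_PATCH_MODULE; the module name and paths are hypothetical.

    include $(CLEAR_VARS)
    LOCAL_MODULE := example-java-base-tests
    LOCAL_SRC_FILES := $(call all-java-files-under, src)
    # The sources live in java.* packages, so patch them into the java.base
    # system module at compile time (there is no runtime module system).
    LOCAL_PATCH_MODULE := java.base
    include $(BUILD_JAVA_LIBRARY)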
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index b4a03ea..7d3fa75 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -55,6 +55,7 @@
 LOCAL_DEX_PREOPT_IMAGE_LOCATION:=
 LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING:=
 LOCAL_DEX_PREOPT:= # '',true,false,nostripping
+LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG:=
 LOCAL_DONT_CHECK_MODULE:=
 # Don't delete the META_INF dir when merging static Java libraries.
 LOCAL_DONT_DELETE_JAR_META_INF:=
@@ -181,6 +182,7 @@
 LOCAL_PACKAGE_NAME:=
 LOCAL_PACKAGE_SPLITS:=
 LOCAL_PACK_MODULE_RELOCATIONS:=
+LOCAL_PATCH_MODULE:=
 LOCAL_PICKUP_FILES:=
 LOCAL_POST_INSTALL_CMD:=
 LOCAL_POST_LINK_CMD:=
@@ -195,6 +197,7 @@
 LOCAL_PREBUILT_STRIP_COMMENTS:=
 LOCAL_PRIVILEGED_MODULE:=
 # '',full,custom,disabled,obfuscation,optimization
+LOCAL_PRODUCT_MODULE:=
 LOCAL_PROGUARD_ENABLED:=
 LOCAL_PROGUARD_FLAG_FILES:=
 LOCAL_PROGUARD_FLAGS:=
diff --git a/core/combo/select.mk b/core/combo/select.mk
index 5e181b9..eab4c72 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -28,7 +28,7 @@
 
 # Set reasonable defaults for the various variables
 
-$(combo_var_prefix)GLOBAL_ARFLAGS := crsPD
+$(combo_var_prefix)GLOBAL_ARFLAGS := cqsD -format=gnu
 
 $(combo_var_prefix)STATIC_LIB_SUFFIX := .a
 
diff --git a/core/config.mk b/core/config.mk
index 4fc5edf..6883d20 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -76,6 +76,7 @@
   ANDROID_PRE_BUILD_PATHS \
   ,See $(CHANGES_URL)#other_envsetup_variables)
 $(KATI_obsolete_var PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE,Set FCM Version in device manifest instead. See $(CHANGES_URL)#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
+$(KATI_obsolete_var USE_CLANG_PLATFORM_BUILD,Clang is the only supported Android compiler. See $(CHANGES_URL)#USE_CLANG_PLATFORM_BUILD)
 
 CHANGES_URL :=
 
@@ -685,6 +686,7 @@
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
+HIDDENAPI := $(HOST_OUT_EXECUTABLES)/hiddenapi
 
 # relocation packer
 RELOCATION_PACKER := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/relocation_packer/relocation_packer
@@ -741,6 +743,19 @@
 
 APICHECK_COMMAND := $(APICHECK) -JXmx1024m -J"classpath $(APICHECK_CLASSPATH)"
 
+# Boolean variable determining if the whitelist for compatible properties is enabled
+PRODUCT_COMPATIBLE_PROPERTY := false
+ifneq ($(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE),)
+  PRODUCT_COMPATIBLE_PROPERTY := $(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+  #$(warning no product shipping level defined)
+else ifneq ($(call math_lt,27,$(PRODUCT_SHIPPING_API_LEVEL)),)
+  PRODUCT_COMPATIBLE_PROPERTY := true
+endif
+
+.KATI_READONLY := \
+    PRODUCT_COMPATIBLE_PROPERTY
+
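For concreteness, a hedged sketch of how this default plays out in a device makefile (values are illustrative): a device shipping with API level 28 or higher gets the whitelist enabled automatically, and the override can force it either way.

    # Hypothetical device.mk:
    PRODUCT_SHIPPING_API_LEVEL := 28
    # 28 > 27, so PRODUCT_COMPATIBLE_PROPERTY defaults to true; or force it:
    # PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true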
 # Boolean variable determining if Treble is fully enabled
 PRODUCT_FULL_TREBLE := false
 ifneq ($(PRODUCT_FULL_TREBLE_OVERRIDE),)
@@ -810,6 +825,9 @@
       $(error When PRODUCT_SHIPPING_API_LEVEL >= 27, TARGET_USES_MKE2FS must be true)
     endif
   endif
+  ifneq ($(call numbers_less_than,$(PRODUCT_SHIPPING_API_LEVEL),$(BOARD_SYSTEMSDK_VERSIONS)),)
+    $(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to PRODUCT_SHIPPING_API_LEVEL ($(PRODUCT_SHIPPING_API_LEVEL)))
+  endif
 endif
 
 # The default key if not set as LOCAL_CERTIFICATE
@@ -825,8 +843,6 @@
 endif
 $(.KATI_obsolete_var DEVICE_FRAMEWORK_MANIFEST_FILE,No one should ever need to use this.)
 
-FRAMEWORK_COMPATIBILITY_MATRIX_FILES := $(wildcard hardware/interfaces/compatibility_matrix.*.xml)
-
 BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
 BUILD_DATETIME_FROM_FILE := $$(cat $(OUT_DIR)/build_date.txt)
 
@@ -919,14 +935,22 @@
 TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT := $(call numbers_less_than,27,$(TARGET_AVAILABLE_SDK_VERSIONS))
 
 INTERNAL_PLATFORM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public_api.txt
+INTERNAL_PLATFORM_PRIVATE_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/private.txt
+INTERNAL_PLATFORM_PRIVATE_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/private-dex.txt
 INTERNAL_PLATFORM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed.txt
 INTERNAL_PLATFORM_SYSTEM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-api.txt
+INTERNAL_PLATFORM_SYSTEM_PRIVATE_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-private.txt
+INTERNAL_PLATFORM_SYSTEM_PRIVATE_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-private-dex.txt
 INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-removed.txt
 INTERNAL_PLATFORM_SYSTEM_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-exact.txt
 INTERNAL_PLATFORM_TEST_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-api.txt
 INTERNAL_PLATFORM_TEST_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-removed.txt
 INTERNAL_PLATFORM_TEST_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-exact.txt
 
+INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-light-greylist.txt
+INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST := frameworks/base/config/hiddenapi-dark-greylist.txt
+INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST := frameworks/base/config/hiddenapi-blacklist.txt
+
 # This is the standard way to name a directory containing prebuilt target
 # objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
 TARGET_PREBUILT_TAG := android-$(TARGET_ARCH)
@@ -968,6 +992,7 @@
     cacheimage-nodeps \
     bptimage-nodeps \
     vnod vendorimage-nodeps \
+    pnod productimage-nodeps \
     systemotherimage-nodeps \
     ramdisk-nodeps \
     bootimage-nodeps \
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index f07659d..5171b8a 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -74,6 +74,16 @@
 ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
     my_cflags += -D_USING_LIBCXX
 
+    ifeq ($($(my_prefix)OS),darwin)
+        # libc++'s headers are annotated with availability macros that indicate
+        # which version of Mac OS was the first to ship with a libc++ feature
+        # available in its *system's* libc++.dylib. We do not use the system's
+        # library, but rather ship our own. As such, these availability
+        # attributes are meaningless for us but cause build breaks when we try
+        # to use code that would not be available in the system's dylib.
+        my_cppflags += -D_LIBCPP_DISABLE_AVAILABILITY
+    endif
+
     # Note that the structure of this means that LOCAL_CXX_STL := libc++ will
     # use the static libc++ for static executables.
     ifeq ($(my_link_type),dynamic)
diff --git a/core/definitions.mk b/core/definitions.mk
index a20bf44..64882d9 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2245,6 +2245,9 @@
       $(addprefix -bootclasspath ,$(strip \
           $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH)) \
           $(PRIVATE_EMPTY_BOOTCLASSPATH)))) \
+    $(if $(PRIVATE_USE_SYSTEM_MODULES), \
+      $(if $(PRIVATE_PATCH_MODULE), \
+        --patch-module=$(PRIVATE_PATCH_MODULE)=$(call normalize-path-list,. $(2)))) \
     $(addprefix -classpath ,$(strip \
         $(call normalize-path-list,$(2)))) \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
@@ -2781,18 +2784,53 @@
 # $(3): LOCAL_DEX_PREOPT, if nostripping then leave classes*.dex
 define dexpreopt-copy-jar
 $(2): $(1)
-	@echo $(if $(filter nostripping,$(3)),"Copy: $$@","Copy without dex: $$@")
+	@echo "Copy: $$@"
 	$$(copy-file-to-target)
 	$(if $(filter nostripping,$(3)),,$$(call dexpreopt-remove-classes.dex,$$@))
 endef
 
-# $(1): the .jar or .apk to remove classes.dex
+# $(1): the .jar or .apk from which to remove classes.dex. Note that if all dex files
+# are uncompressed in the archive, then dexopt will not make a copy of the dex
+# files and we should not strip.
 define dexpreopt-remove-classes.dex
-$(hide) zip --quiet --delete $(1) classes.dex; \
+$(hide) if (zipinfo $1 '*.dex' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
+zip --quiet --delete $(1) classes.dex; \
 dex_index=2; \
 while zip --quiet --delete $(1) classes$${dex_index}.dex > /dev/null; do \
   let dex_index=dex_index+1; \
-done
+done \
+fi
+endef
+
+define hiddenapi-copy-dex-files
+$(2): $(1) $(HIDDENAPI) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+      $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	@rm -rf $(dir $(2))
+	@mkdir -p $(dir $(2))
+	find $(dir $(1)) -maxdepth 1 -name "classes*.dex" | sort | \
+		xargs -I{} cp -f {} $(dir $(2))
+	find $(dir $(2)) -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
+		xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+		                   --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
+		                   --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+endef
+
+define hiddenapi-copy-soong-jar
+$(2): PRIVATE_FOLDER := $(dir $(2))dex-hiddenapi
+$(2): $(1) $(HIDDENAPI) $(SOONG_ZIP) $(MERGE_ZIPS) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+      $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	@echo "Hidden API: $$@"
+	$$(copy-file-to-target)
+	@rm -rf $${PRIVATE_FOLDER}
+	@mkdir -p $${PRIVATE_FOLDER}
+	unzip -q $(2) 'classes*.dex' -d $${PRIVATE_FOLDER}
+	find $${PRIVATE_FOLDER} -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
+		xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+		                   --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
+		                   --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	$(SOONG_ZIP) -o $${PRIVATE_FOLDER}/classes.dex.jar -C $${PRIVATE_FOLDER} -D $${PRIVATE_FOLDER}
+	$(MERGE_ZIPS) -D -zipToNotStrip $${PRIVATE_FOLDER}/classes.dex.jar -stripFile "classes*.dex" \
+		$(2) $${PRIVATE_FOLDER}/classes.dex.jar $(1)
 endef
 
 ###########################################################
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 0dcb07f..83c4a95 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -35,12 +35,14 @@
 # Conditional to building on linux, as dex2oat currently does not work on darwin.
 ifeq ($(HOST_OS),linux)
   WITH_DEXPREOPT ?= true
-# For an eng build only pre-opt the boot image and system server. This gives reasonable performance
-# and still allows a simple workflow: building in frameworks/base and syncing.
   ifeq (eng,$(TARGET_BUILD_VARIANT))
+    # Don't strip for quick development turnarounds.
+    DEX_PREOPT_DEFAULT := nostripping
+    # For an eng build only pre-opt the boot image and system server. This gives reasonable performance
+    # and still allows a simple workflow: building in frameworks/base and syncing.
     WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY ?= true
   endif
-# Add mini-debug-info to the boot classpath unless explicitly asked not to.
+  # Add mini-debug-info to the boot classpath unless explicitly asked not to.
   ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO))
     PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info
   endif
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 79e72c1..af2355a 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -7,11 +7,14 @@
 # Set USE_DEX2OAT_DEBUG to false for only building non-debug versions.
 ifeq ($(USE_DEX2OAT_DEBUG),false)
 DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oat$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoat$(HOST_EXECUTABLE_SUFFIX)
 else
 DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoatd$(HOST_EXECUTABLE_SUFFIX)
 endif
 
 DEX2OAT_DEPENDENCY += $(DEX2OAT)
+PATCHOAT_DEPENDENCY += $(PATCHOAT)
 
 # Use the first preloaded-classes file in PRODUCT_COPY_FILES.
 PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
@@ -87,14 +90,17 @@
 # is converted into to boot.art (to match the legacy assumption that boot.art
 # exists), and the rest are converted to boot-<name>.art.
 # In addition, each .art file has an associated .oat file.
-LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).oat boot-$(jar).vdex)
-LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.oat boot.vdex
+LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).art.rel boot-$(jar).oat boot-$(jar).vdex)
+LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.art.rel boot.oat boot.vdex
 
 # If we use a boot image profile.
 my_use_profile_for_boot_image := $(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE)
 ifeq (,$(my_use_profile_for_boot_image))
-# If not set, use the default.
-my_use_profile_for_boot_image := false
+# If not set, set the default to true if we are not a PDK build. PDK builds
+# can't build the profile since they don't have frameworks/base.
+ifneq (true,$(TARGET_BUILD_PDK))
+my_use_profile_for_boot_image := true
+endif
 endif
 
 ifeq (true,$(my_use_profile_for_boot_image))
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 8b71198..ad8f18d 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -73,14 +73,16 @@
 
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_BOOT_IMAGE_FLAGS := $(my_boot_image_flags)
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_IMAGE_LOCATION := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
 # Use dex2oat debug version for better error reporting
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(my_out_boot_image_profile_location)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(PATCHOAT_DEPENDENCY) $(my_out_boot_image_profile_location)
 	@echo "target dex2oat: $@"
 	@mkdir -p $(dir $@)
 	@mkdir -p $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))
-	@rm -f $(dir $@)/*.art $(dir $@)/*.oat
+	@rm -f $(dir $@)/*.art $(dir $@)/*.oat $(dir $@)/*.art.rel
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
+	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art.rel
 	$(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
 		--runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
 		$(PRIVATE_BOOT_IMAGE_FLAGS) \
@@ -99,6 +101,11 @@
 		--multi-image --no-inline-from=core-oj.jar \
 		--abort-on-hard-verifier-error \
 		--abort-on-soft-verifier-error \
-		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS)
+		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) && \
+	ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
+        --input-image-location=$(PRIVATE_IMAGE_LOCATION) \
+        --output-image-relocation-file=$@.rel \
+        --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
+        --base-offset-delta=0x10000000
 
 endif
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 93824c3..e337279 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -1,6 +1,20 @@
 # dexpreopt_odex_install.mk is used to define odex creation rules for JARs and APKs
 # This file depends on variables set in base_rules.mk
-# Output variables: LOCAL_DEX_PREOPT, built_odex, dexpreopt_boot_jar_module
+# Output variables: LOCAL_DEX_PREOPT, LOCAL_UNCOMPRESS_DEX, built_odex,
+#                   dexpreopt_boot_jar_module
+
+# We explicitly uncompress APKs of privileged apps, and of modules
+# loaded by privileged apps.
+LOCAL_UNCOMPRESS_DEX := false
+ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
+ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+  LOCAL_UNCOMPRESS_DEX := true
+else
+  ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
+    LOCAL_UNCOMPRESS_DEX := true
+  endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_PRIVILEGED_MODULE
+endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
 
 # Setting LOCAL_DEX_PREOPT based on WITH_DEXPREOPT, LOCAL_DEX_PREOPT, etc
 LOCAL_DEX_PREOPT := $(strip $(LOCAL_DEX_PREOPT))
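A minimal sketch, assuming a hypothetical library name, of the two product-level knobs consulted above: listing a jar in PRODUCT_LOADED_BY_PRIVILEGED_MODULES uncompresses its dex, while DONT_UNCOMPRESS_PRIV_APPS_DEXS opts the whole product out.

    # Hypothetical product makefile:
    PRODUCT_LOADED_BY_PRIVILEGED_MODULES += com.example.shared-library
    # To keep compressed dex even for privileged apps:
    # DONT_UNCOMPRESS_PRIV_APPS_DEXS := true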
@@ -46,14 +60,27 @@
 endif
 endif
 
-# if installing into system, and odex are being installed into system_other, don't strip
-ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 ifeq ($(LOCAL_DEX_PREOPT),true)
+
+# Don't strip with dexes we explicitly uncompress (dexopt will not store the dex code).
+ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
+LOCAL_DEX_PREOPT := nostripping
+endif  # LOCAL_UNCOMPRESS_DEX
+
+# system_other isn't there for an OTA, so don't strip
+# if module is on system, and odex is on system_other.
+ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 ifneq ($(call install-on-system-other, $(my_module_path)),)
 LOCAL_DEX_PREOPT := nostripping
-endif
-endif
-endif
+endif  # install-on-system-other
+endif  # BOARD_USES_SYSTEM_OTHER_ODEX
+
+# We also don't strip if all dexes are uncompressed (dexopt will not store the dex code),
+# but that requires inspecting the source file, which is too early at this point (as we
+# don't know if the source file will actually be used).
+# See dexpreopt-remove-classes.dex.
+
+endif  # LOCAL_DEX_PREOPT
 
 built_odex :=
 built_vdex :=
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 255c02b..6a7fbd1 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -179,6 +179,7 @@
 TARGET_COPY_OUT_ASAN := $(TARGET_COPY_OUT_DATA)/asan
 TARGET_COPY_OUT_OEM := oem
 TARGET_COPY_OUT_ODM := odm
+TARGET_COPY_OUT_PRODUCT := product
 TARGET_COPY_OUT_ROOT := root
 TARGET_COPY_OUT_RECOVERY := recovery
 
@@ -198,6 +199,17 @@
 TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
 ###########################################
 
+###########################################
+# Define TARGET_COPY_OUT_PRODUCT to a placeholder, for at this point
+# we don't know if the device wants to build a separate product.img
+# or just build product stuff into system.img.
+# A device can set up TARGET_COPY_OUT_PRODUCT to "product" in its
+# BoardConfig.mk.
+# We'll substitute with the real value after loading BoardConfig.mk.
+_product_path_placeholder := ||PRODUCT-PATH-PH||
+TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
+###########################################
+
 #################################################################
 # Set up minimal BOOTCLASSPATH list of jars to build/execute
 # java code with dalvikvm/art.
@@ -273,6 +285,29 @@
 else ifdef BOARD_USES_VENDORIMAGE
 $(error TARGET_COPY_OUT_VENDOR must be set to 'vendor' to use a vendor image)
 endif
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_PRODUCT
+ifeq ($(TARGET_COPY_OUT_PRODUCT),$(_product_path_placeholder))
+TARGET_COPY_OUT_PRODUCT := system/product
+else ifeq ($(filter product system/product,$(TARGET_COPY_OUT_PRODUCT)),)
+$(error TARGET_COPY_OUT_PRODUCT must be either 'product' or 'system/product', seeing '$(TARGET_COPY_OUT_PRODUCT)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_product_path_placeholder),$(TARGET_COPY_OUT_PRODUCT),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_PRODUCTIMAGE :=
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+BOARD_USES_PRODUCTIMAGE := true
+endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+BOARD_USES_PRODUCTIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_PRODUCT),product)
+BOARD_USES_PRODUCTIMAGE := true
+else ifdef BOARD_USES_PRODUCTIMAGE
+$(error TARGET_COPY_OUT_PRODUCT must be set to 'product' to use a product image)
+endif
+
 ###########################################
 # Ensure that only TARGET_RECOVERY_UPDATER_LIBS *or* AB_OTA_UPDATER is set.
 TARGET_RECOVERY_UPDATER_LIBS ?=
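A hedged BoardConfig.mk sketch (the partition size is a placeholder) showing a combination the checks above accept for building a separate product.img; the image itself is then built by the productimage / pnod goals added in core/Makefile.

    TARGET_COPY_OUT_PRODUCT := product
    BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
    BOARD_PRODUCTIMAGE_PARTITION_SIZE := 268435456
    # Alternatively, ship a prebuilt image instead of building one:
    # BOARD_PREBUILT_PRODUCTIMAGE := vendor/example/prebuilt/product.img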
@@ -304,6 +339,13 @@
   $(foreach v,$(PRODUCT_EXTRA_VNDK_VERSIONS),$(call check_vndk_version,$(v)))
 endif
 
+# Ensure that BOARD_SYSTEMSDK_VERSIONS are all within PLATFORM_SYSTEMSDK_VERSIONS
+_unsupported_systemsdk_versions := $(filter-out $(PLATFORM_SYSTEMSDK_VERSIONS),$(BOARD_SYSTEMSDK_VERSIONS))
+ifneq (,$(_unsupported_systemsdk_versions))
+  $(error System SDK versions '$(_unsupported_systemsdk_versions)' in BOARD_SYSTEMSDK_VERSIONS are not supported.\
+          Supported versions are $(PLATFORM_SYSTEMSDK_VERSIONS))
+endif
+
 # ---------------------------------------------------------------
 # Set up configuration for target machine.
 # The following must be set:
@@ -618,6 +660,39 @@
 endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS := $(TARGET_OUT_ODM_APPS)
 
+TARGET_OUT_PRODUCT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_PRODUCT)
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+target_out_product_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/system
+ifeq ($(SANITIZE_LITE),true)
+# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
+# work with unsanitized app_process. For simplicity, generate APKs into /data/asan/.
+target_out_product_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/product
+else
+target_out_product_app_base := $(TARGET_OUT_PRODUCT)
+endif
+else
+target_out_product_shared_libraries_base := $(TARGET_OUT)
+target_out_product_app_base := $(TARGET_OUT_PRODUCT)
+endif
+
+ifeq ($(TARGET_IS_64_BIT),true)
+TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib64
+else
+TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib
+endif
+TARGET_OUT_PRODUCT_JAVA_LIBRARIES:= $(TARGET_OUT_PRODUCT)/framework
+TARGET_OUT_PRODUCT_APPS := $(target_out_product_app_base)/app
+TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(target_out_product_app_base)/priv-app
+TARGET_OUT_PRODUCT_ETC := $(TARGET_OUT_PRODUCT)/etc
+
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
+else
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib
+endif
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS := $(TARGET_OUT_PRODUCT_APPS)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(TARGET_OUT_PRODUCT_APPS_PRIVILEGED)
+
 TARGET_OUT_BREAKPAD := $(PRODUCT_OUT)/breakpad
 
 TARGET_OUT_UNSTRIPPED := $(PRODUCT_OUT)/symbols
@@ -657,13 +732,3 @@
 ifeq ($(CALLED_FROM_SETUP),true)
 PRINT_BUILD_CONFIG ?= true
 endif
-
-ifeq ($(USE_CLANG_PLATFORM_BUILD),)
-USE_CLANG_PLATFORM_BUILD := true
-endif
-
-ifneq ($(USE_CLANG_PLATFORM_BUILD),true)
-ifneq ($(USE_CLANG_PLATFORM_BUILD),false)
-$(error USE_CLANG_PLATFORM_BUILD must be true or false)
-endif
-endif
diff --git a/core/java.mk b/core/java.mk
index ee071c9..6f5dce4 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -73,6 +73,7 @@
 full_classes_proguard_jar := $(intermediates.COMMON)/classes-proguard.jar
 full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
 built_dex_intermediate := $(intermediates.COMMON)/dex/classes.dex
+built_dex_hiddenapi := $(intermediates.COMMON)/dex-hiddenapi/classes.dex
 full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
 java_source_list_file := $(intermediates.COMMON)/java-source-list
 
@@ -762,7 +763,14 @@
 endif
 endif
 
-$(built_dex): $(built_dex_intermediate)
+ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),) # is_boot_jar
+  $(eval $(call hiddenapi-copy-dex-files,$(built_dex_intermediate),$(built_dex_hiddenapi)))
+  built_dex_copy_from := $(built_dex_hiddenapi)
+else # !is_boot_jar
+  built_dex_copy_from := $(built_dex_intermediate)
+endif # is_boot_jar
+
+$(built_dex): $(built_dex_copy_from)
 	@echo Copying: $@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) rm -f $(dir $@)/classes*.dex
diff --git a/core/java_common.mk b/core/java_common.mk
index cfc9d7f..436f3a3 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -359,6 +359,7 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := $(full_java_bootclasspath_libs)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_EMPTY_BOOTCLASSPATH := $(empty_bootclasspath)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES := $(my_system_modules_dir)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PATCH_MODULE := $(LOCAL_PATCH_MODULE)
 
 ifndef LOCAL_IS_HOST_MODULE
 # This is set by packages that are linking to other packages that export
diff --git a/core/java_library.mk b/core/java_library.mk
index 8cf0074..1b914f5 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -72,10 +72,10 @@
 	$(call add-dex-to-package-arg,$@.tmp)
 	$(hide) $(ZIPTIME) $@.tmp
 	$(call commit-change-for-toc,$@)
-ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 	$(uncompress-dexs)
 	$(align-package)
-endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_UNCOMPRESS_DEX
 
 .KATI_RESTAT: $(common_javalib.jar)
 
diff --git a/core/local_systemsdk.mk b/core/local_systemsdk.mk
new file mode 100644
index 0000000..6dab346
--- /dev/null
+++ b/core/local_systemsdk.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ifdef BOARD_SYSTEMSDK_VERSIONS
+  # Apps and jars in the vendor or odm partition are forced to build against the System SDK.
+  _is_vendor_app :=
+  ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+    # Note: no need to check LOCAL_MODULE_PATH* since LOCAL_[VENDOR|ODM|OEM]_MODULE is already
+    # set correctly before this is included.
+    _is_vendor_app := true
+  endif
+  ifneq (,$(filter JAVA_LIBRARIES APPS,$(LOCAL_MODULE_CLASS)))
+    ifndef LOCAL_SDK_VERSION
+      ifeq ($(_is_vendor_app),true)
+        LOCAL_SDK_VERSION := system_current
+      endif
+    endif
+  endif
+endif
+
+# Ensure that the selected System SDK version is one of the supported versions.
+# The range of supported versions becomes narrower when BOARD_SYSTEMSDK_VERSIONS
+# is set, which is a subset of PLATFORM_SYSTEMSDK_VERSIONS.
+ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
+  ifneq ($(_is_vendor_app),true)
+    # Apps bundled in the system partition can use all System SDK versions provided by the platform.
+    _supported_systemsdk_versions := $(PLATFORM_SYSTEMSDK_VERSIONS)
+  else ifdef BOARD_SYSTEMSDK_VERSIONS
+    # When BOARD_SYSTEMSDK_VERSIONS is set, vendor apps are restricted to those versions,
+    # which are a subset of PLATFORM_SYSTEMSDK_VERSIONS.
+    _supported_systemsdk_versions := $(BOARD_SYSTEMSDK_VERSIONS)
+  else
+    # If not, vendor APKs are treated the same as system apps.
+    _supported_systemsdk_versions := $(PLATFORM_SYSTEMSDK_VERSIONS)
+  endif
+  _system_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
+  ifneq ($(_system_sdk_version),$(filter $(_system_sdk_version),$(_supported_systemsdk_versions)))
+    $(call pretty-error,Incompatible LOCAL_SDK_VERSION '$(LOCAL_SDK_VERSION)'. \
+           System SDK version '$(_system_sdk_version)' is not supported. Supported versions are: $(_supported_systemsdk_versions))
+  endif
+  _system_sdk_version :=
+  _supported_systemsdk_versions :=
+endif
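An illustrative sketch of the inputs this file reacts to; the version number, package name, and paths are hypothetical. A vendor app without LOCAL_SDK_VERSION is forced to system_current, and an explicit System SDK choice must fall within the board's allowed set.

    # BoardConfig.mk: restrict vendor apps to these System SDK versions.
    BOARD_SYSTEMSDK_VERSIONS := 28

    # Android.mk of a vendor app:
    include $(CLEAR_VARS)
    LOCAL_PACKAGE_NAME := ExampleVendorApp
    LOCAL_VENDOR_MODULE := true
    LOCAL_SRC_FILES := $(call all-java-files-under, src)
    LOCAL_SDK_VERSION := system_28
    include $(BUILD_PACKAGE)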
diff --git a/core/local_vsdk.mk b/core/local_vsdk.mk
deleted file mode 100644
index f798d47..0000000
--- a/core/local_vsdk.mk
+++ /dev/null
@@ -1,19 +0,0 @@
-
-ifdef BOARD_VSDK_VERSION
-# Set LOCAL_SDK_VERSION to system_current, If LOCAL_SDK_VERSION is not defined and LOCAL_VENDOR_MODULE is true
-  _is_vendor_app :=
-  ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_OEM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
-    _is_vendor_app := true
-  else
-    ifneq (,$(filter $(TARGET_OUT_VENDOR)%,$(LOCAL_MODULE_PATH) $(LOCAL_MODULE_PATH_32) $(LOCAL_MODULE_PATH_64)))
-      _is_vendor_app := true
-    endif
-  endif
-  ifneq (,$(filter JAVA_LIBRARIES APPS,$(LOCAL_MODULE_CLASS)))
-    ifndef LOCAL_SDK_VERSION
-      ifeq ($(_is_vendor_app),true)
-        LOCAL_SDK_VERSION := system_current
-      endif
-    endif
-  endif
-endif
diff --git a/core/main.mk b/core/main.mk
index fe178da..98f6ecb 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -205,6 +205,14 @@
 	variables like PRODUCT_SEPOLICY_SPLIT should be used until that is \
 	possible.)
 
+# Set ro.actionable_compatible_property.enabled so that it is known at runtime whether
+# the whitelist of actionable compatible properties is enabled or not.
+ifeq ($(PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE),true)
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=false
+else
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=${PRODUCT_COMPATIBLE_PROPERTY}
+endif
+
 # -----------------------------------------------------------------
 ###
 ### In this section we set up the things that are different
@@ -1033,6 +1041,9 @@
 .PHONY: vendorimage
 vendorimage: $(INSTALLED_VENDORIMAGE_TARGET)
 
+.PHONY: productimage
+productimage: $(INSTALLED_PRODUCTIMAGE_TARGET)
+
 .PHONY: systemotherimage
 systemotherimage: $(INSTALLED_SYSTEMOTHERIMAGE_TARGET)
 
@@ -1056,9 +1067,11 @@
 	$(INSTALLED_CACHEIMAGE_TARGET) \
 	$(INSTALLED_BPTIMAGE_TARGET) \
 	$(INSTALLED_VENDORIMAGE_TARGET) \
+	$(INSTALLED_PRODUCTIMAGE_TARGET) \
 	$(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
 	$(INSTALLED_FILES_FILE) \
 	$(INSTALLED_FILES_FILE_VENDOR) \
+	$(INSTALLED_FILES_FILE_PRODUCT) \
 	$(INSTALLED_FILES_FILE_SYSTEMOTHER) \
 	soong_docs
 
@@ -1124,6 +1137,7 @@
     $(COVERAGE_ZIP) \
     $(INSTALLED_FILES_FILE) \
     $(INSTALLED_FILES_FILE_VENDOR) \
+    $(INSTALLED_FILES_FILE_PRODUCT) \
     $(INSTALLED_FILES_FILE_SYSTEMOTHER) \
     $(INSTALLED_BUILD_PROP_TARGET) \
     $(BUILT_TARGET_FILES_PACKAGE) \
diff --git a/core/package_internal.mk b/core/package_internal.mk
index e153a8a..4890966 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -111,6 +111,8 @@
         enforce_rro_enabled :=
       else ifeq (true,$(LOCAL_ODM_MODULE))
         enforce_rro_enabled :=
+      else ifeq (true,$(LOCAL_PRODUCT_MODULE))
+        enforce_rro_enabled :=
       endif
     else ifeq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
       enforce_rro_enabled :=
@@ -580,7 +582,7 @@
 else
 $(LOCAL_BUILT_MODULE): PRIVATE_RESOURCE_LIST := $(all_res_assets)
 $(LOCAL_BUILT_MODULE) : $(all_res_assets) $(full_android_manifest) $(AAPT) $(ZIPALIGN)
-endif
+endif  # LOCAL_USE_AAPT2
 ifdef LOCAL_COMPRESSED_MODULE
 $(LOCAL_BUILT_MODULE) : $(MINIGZIP)
 endif
@@ -605,24 +607,19 @@
 	$(call add-jar-resources-to-package,$@,$(PRIVATE_FULL_CLASSES_JAR),$(PRIVATE_RESOURCE_INTERMEDIATES_DIR))
 endif
 endif  # full_classes_jar
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+	@# No need to align, sign-package below will do it.
+	$(uncompress-dexs)
+endif
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(BUILD_PLATFORM_ZIP),)
 	@# Keep a copy of apk with classes.dex unstripped
 	$(hide) cp -f $@ $(dir $@)package.dex.apk
 endif  # BUILD_PLATFORM_ZIP
-ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
-	@# No need to align, sign-package below will do it.
-	$(uncompress-dexs)
-endif  # LOCAL_PRIVILEGED_MODULE
-endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
 endif
-endif
-ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
-	$(uncompress-dexs)
-endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_DEX_PREOPT
 	$(sign-package)
 ifdef LOCAL_COMPRESSED_MODULE
 	$(compress-package)
@@ -646,6 +643,10 @@
 $(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(built_dex)
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 	$(add-dex-to-package)
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+	$(uncompress-dexs)
+	$(align-package)
+endif
 	$(hide) mv $@ $@.input
 	$(call dexpreopt-one-file,$@.input,$@)
 	$(hide) rm $@.input
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index d934338..ea7fd03 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -390,15 +390,9 @@
 $(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(SIGNAPK_JAR)
 	$(transform-prebuilt-to-target)
 	$(uncompress-shared-libs)
-ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 	$(uncompress-dexs)
-else
-  ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
-	  $(uncompress-dexs)
-  endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
-endif  # LOCAL_PRIVILEGED_MODULE
-endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
+endif  # LOCAL_UNCOMPRESS_DEX
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(BUILD_PLATFORM_ZIP),)
 	@# Keep a copy of apk with classes.dex unstripped
@@ -423,11 +417,19 @@
 endif  # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
 
 ###############################
-## Rule to build the odex file
+## Rule to build the odex file.
+# If the built module is not stripped, use it as the dexpreopt input, since
+# dexpreopt can apply optimizations based on whether the module contains only
+# uncompressed dex code.
 ifdef LOCAL_DEX_PREOPT
+ifeq (nostripping,$(LOCAL_DEX_PREOPT))
+$(built_odex) : $(built_module)
+	$(call dexpreopt-one-file,$<,$@)
+else
 $(built_odex) : $(my_prebuilt_src_file)
 	$(call dexpreopt-one-file,$<,$@)
 endif
+endif
 
 ###############################
 ## Install split apks.
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 666a207..576d14d 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -104,6 +104,7 @@
 	$(hide) echo 'PRODUCT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PROPERTY_OVERRIDES)' >> $$@
 	$(hide) echo 'PRODUCT_DEFAULT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_DEFAULT_PROPERTY_OVERRIDES)' >> $$@
 	$(hide) echo 'PRODUCT_SYSTEM_DEFAULT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_SYSTEM_DEFAULT_PROPERTIES)' >> $$@
+	$(hide) echo 'PRODUCT_PRODUCT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PRODUCT_PROPERTIES)' >> $$@
 	$(hide) echo 'PRODUCT_CHARACTERISTICS=$$(PRODUCTS.$(strip $(1)).PRODUCT_CHARACTERISTICS)' >> $$@
 	$(hide) echo 'PRODUCT_COPY_FILES=$$(PRODUCTS.$(strip $(1)).PRODUCT_COPY_FILES)' >> $$@
 	$(hide) echo 'PRODUCT_OTA_PUBLIC_KEYS=$$(PRODUCTS.$(strip $(1)).PRODUCT_OTA_PUBLIC_KEYS)' >> $$@
diff --git a/core/product.mk b/core/product.mk
index f15f6b3..8095b27 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -88,6 +88,7 @@
     PRODUCT_BRAND \
     PRODUCT_PROPERTY_OVERRIDES \
     PRODUCT_DEFAULT_PROPERTY_OVERRIDES \
+    PRODUCT_PRODUCT_PROPERTIES \
     PRODUCT_CHARACTERISTICS \
     PRODUCT_COPY_FILES \
     PRODUCT_OTA_PUBLIC_KEYS \
@@ -125,6 +126,7 @@
     PRODUCT_VERITY_SIGNING_KEY \
     PRODUCT_SYSTEM_VERITY_PARTITION \
     PRODUCT_VENDOR_VERITY_PARTITION \
+    PRODUCT_PRODUCT_VERITY_PARTITION \
     PRODUCT_SYSTEM_SERVER_DEBUG_INFO \
     PRODUCT_DEX_PREOPT_MODULE_CONFIGS \
     PRODUCT_DEX_PREOPT_DEFAULT_FLAGS \
@@ -136,6 +138,7 @@
     PRODUCT_SANITIZER_MODULE_CONFIGS \
     PRODUCT_SYSTEM_BASE_FS_PATH \
     PRODUCT_VENDOR_BASE_FS_PATH \
+    PRODUCT_PRODUCT_BASE_FS_PATH \
     PRODUCT_SHIPPING_API_LEVEL \
     VENDOR_PRODUCT_RESTRICT_VENDOR_FILES \
     VENDOR_EXCEPTION_MODULES \
@@ -149,6 +152,8 @@
     PRODUCT_ADB_KEYS \
     PRODUCT_CFI_INCLUDE_PATHS \
     PRODUCT_CFI_EXCLUDE_PATHS \
+    PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE \
+    PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE \
 
 define dump-product
 $(info ==== $(1) ====)\
@@ -302,6 +307,8 @@
 	BOARD_FLASH_BLOCK_SIZE \
 	BOARD_VENDORIMAGE_PARTITION_SIZE \
 	BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
+	BOARD_PRODUCTIMAGE_PARTITION_SIZE \
+	BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
 	BOARD_INSTALLER_CMDLINE \
 
 
diff --git a/core/product_config.mk b/core/product_config.mk
index 5b0e257..bf607bb 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -365,6 +365,13 @@
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
 .KATI_READONLY := PRODUCT_SYSTEM_DEFAULT_PROPERTIES
 
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+# Used for adding properties to build.prop of the product partition.
+PRODUCT_PRODUCT_PROPERTIES := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_PROPERTIES))
+.KATI_READONLY := PRODUCT_PRODUCT_PROPERTIES
+
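For illustration only (the property name below is hypothetical, not part of this
change), a device makefile could populate this as:

    PRODUCT_PRODUCT_PROPERTIES += ro.example.product.feature=true

which would end up in /product/build.prop as "ro.example.product.feature=true".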
 # Should we use the default resources or add any product specific overlays
 PRODUCT_PACKAGE_OVERLAYS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGE_OVERLAYS))
@@ -482,5 +489,13 @@
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CFI_INCLUDE_PATHS))
 
 # which Soong namespaces to export to Make
-PRODUCT_SOONG_NAMESPACES :=
+PRODUCT_SOONG_NAMESPACES := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SOONG_NAMESPACES))
+
+# A flag to override PRODUCT_COMPATIBLE_PROPERTY
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE))
+
+# Whether the whitelist of actionable compatible properties should be disabled or not
+PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index bbad4c8..639b019 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -110,10 +110,11 @@
 $(call add_json_bool, Device_uses_hwc2,                  $(filter true,$(TARGET_USES_HWC2)))
 $(call add_json_list, DeviceKernelHeaders,               $(TARGET_PROJECT_SYSTEM_INCLUDES))
 $(call add_json_bool, DevicePrefer32BitExecutables,      $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)))
-$(call add_json_val,  DeviceUsesClang,                   $(if $(USE_CLANG_PLATFORM_BUILD),$(USE_CLANG_PLATFORM_BUILD),false))
 $(call add_json_str,  DeviceVndkVersion,                 $(BOARD_VNDK_VERSION))
 $(call add_json_str,  Platform_vndk_version,             $(PLATFORM_VNDK_VERSION))
 $(call add_json_list, ExtraVndkVersions,                 $(PRODUCT_EXTRA_VNDK_VERSIONS))
+$(call add_json_list, DeviceSystemSdkVersions,           $(BOARD_SYSTEMSDK_VERSIONS))
+$(call add_json_list, Platform_systemsdk_versions,       $(PLATFORM_SYSTEMSDK_VERSIONS))
 $(call add_json_bool, Malloc_not_svelte,                 $(call invert_bool,$(filter true,$(MALLOC_SVELTE))))
 $(call add_json_str,  Override_rs_driver,                $(OVERRIDE_RS_DRIVER))
 
@@ -123,7 +124,7 @@
 $(call add_json_bool, Uml,                               $(filter true,$(TARGET_USER_MODE_LINUX)))
 $(call add_json_str,  VendorPath,                        $(TARGET_COPY_OUT_VENDOR))
 $(call add_json_str,  OdmPath,                           $(TARGET_COPY_OUT_ODM))
-$(call add_json_str,  OemPath,                           $(TARGET_COPY_OUT_OEM))
+$(call add_json_str,  ProductPath,                       $(TARGET_COPY_OUT_PRODUCT))
 $(call add_json_bool, MinimizeJavaDebugInfo,             $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
 
 $(call add_json_bool, UseGoma,                           $(filter-out false,$(USE_GOMA)))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 6cf9422..5c2d768 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -40,7 +40,11 @@
 
 ifdef LOCAL_SOONG_DEX_JAR
   ifndef LOCAL_IS_HOST_MODULE
-    $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),)  # is_boot_jar
+      $(eval $(call hiddenapi-copy-soong-jar,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    else # !is_boot_jar
+      $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    endif # is_boot_jar
     $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_jar) $(full_classes_header_jar)))
 
     dex_preopt_profile_src_file := $(common_javalib.jar)
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index f6bc76b..d70dfb4 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -27,6 +27,7 @@
 #     BUILD_DATETIME
 #     PLATFORM_SECURITY_PATCH
 #     PLATFORM_VNDK_VERSION
+#     PLATFORM_SYSTEMSDK_VERSIONS
 #
 
 # Look for an optional file containing overrides of the defaults,
@@ -202,6 +203,32 @@
   endif
 endif
 
+ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
+  # This is the oldest version of system SDK that the platform supports. Unlike
+  # the public SDK, where the platform essentially supports all previous SDK
+  # versions, the platform supports only a small number of recent system SDK
+  # versions, as some old system APIs are gradually deprecated and then removed.
+  # However, in P we only support the single latest version, since there are no
+  # old system SDK versions yet. Therefore, this is set to empty for now; it
+  # should later (post P) be set to a number, such as 28.
+  PLATFORM_SYSTEMSDK_MIN_VERSION :=
+endif
+
+# This is the list of system SDK versions that the current platform supports.
+PLATFORM_SYSTEMSDK_VERSIONS :=
+ifneq (,$(PLATFORM_SYSTEMSDK_MIN_VERSION))
+  $(if $(call math_is_number,$(PLATFORM_SYSTEMSDK_MIN_VERSION)),,\
+    $(error PLATFORM_SYSTEMSDK_MIN_VERSION must be a number, but was $(PLATFORM_SYSTEMSDK_MIN_VERSION)))
+  PLATFORM_SYSTEMSDK_VERSIONS := $(call int_range_list,$(PLATFORM_SYSTEMSDK_MIN_VERSION),$(PLATFORM_SDK_VERSION))
+endif
+# The platform always supports the current version
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+  PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_SDK_VERSION)
+else
+  PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_VERSION_CODENAME)
+endif
+PLATFORM_SYSTEMSDK_VERSIONS := $(strip $(sort $(PLATFORM_SYSTEMSDK_VERSIONS)))
+
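Worked example with hypothetical values: given PLATFORM_SYSTEMSDK_MIN_VERSION := 28,
PLATFORM_SDK_VERSION := 30 and PLATFORM_VERSION_CODENAME := REL, int_range_list
expands to "28 29 30", the REL branch appends 30 again, and the final sort/strip
leaves:

    PLATFORM_SYSTEMSDK_VERSIONS := 28 29 30

On a non-REL build the codename (e.g. "P") is appended instead of the SDK number.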
 ifndef PLATFORM_SECURITY_PATCH
     #  Used to indicate the security patch that has been applied to the device.
     #  It must signify that the build includes all security patches issued up through the designated Android Public Security Bulletin.
diff --git a/envsetup.sh b/envsetup.sh
index 576c234..372dffb 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -51,7 +51,7 @@
     cached_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     # Call the build system to dump the "<val>=<value>" pairs as a shell script.
-    build_dicts_script=`\cd $T; build/soong/soong_ui.bash --dumpvars-mode \
+    build_dicts_script=`\builtin cd $T; build/soong/soong_ui.bash --dumpvars-mode \
                         --vars="$cached_vars" \
                         --abs-vars="$cached_abs_vars" \
                         --var-prefix=var_cache_ \
diff --git a/help.sh b/help.sh
index 3f39c77..c143542 100755
--- a/help.sh
+++ b/help.sh
@@ -38,6 +38,8 @@
                             Stands for "System, NO Dependencies"
     vnod                    Quickly rebuild the vendor image from built packages
                             Stands for "Vendor, NO Dependencies"
+    pnod                    Quickly rebuild the product image from built packages
+                            Stands for "Product, NO Dependencies"
 
 
 So, for example, you could run:
diff --git a/target/board/Android.mk b/target/board/Android.mk
index f4d6b93..9b2620c 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -37,7 +37,7 @@
 LOCAL_MODULE        := device_manifest.xml
 LOCAL_MODULE_STEM   := manifest.xml
 LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)
+LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
 
 GEN := $(local-generated-sources-dir)/manifest.xml
 $(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE)
@@ -53,18 +53,28 @@
 BUILT_VENDOR_MANIFEST := $(LOCAL_BUILT_MODULE)
 endif
 
+# VNDK Version in device compatibility matrix and framework manifest
+ifeq ($(BOARD_VNDK_VERSION),current)
+VINTF_VNDK_VERSION := $(PLATFORM_VNDK_VERSION)
+else
+VINTF_VNDK_VERSION := $(BOARD_VNDK_VERSION)
+endif
+
 # Device Compatibility Matrix
 ifdef DEVICE_MATRIX_FILE
 include $(CLEAR_VARS)
 LOCAL_MODULE        := device_compatibility_matrix.xml
 LOCAL_MODULE_STEM   := compatibility_matrix.xml
 LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)
+LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
 
 GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
+
+$(GEN): PRIVATE_VINTF_VNDK_VERSION := $(VINTF_VNDK_VERSION)
 $(GEN): $(DEVICE_MATRIX_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37342627): put BOARD_VNDK_VERSION & BOARD_VNDK_LIBRARIES into device matrix.
-	$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
+	REQUIRED_VNDK_VERSION=$(PRIVATE_VINTF_VNDK_VERSION) \
+	BOARD_SYSTEMSDK_VERSIONS="$(BOARD_SYSTEMSDK_VERSIONS)" \
+		$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
 
 LOCAL_PREBUILT_MODULE_FILE := $(GEN)
 include $(BUILD_PREBUILT)
@@ -76,7 +86,7 @@
 LOCAL_MODULE        := framework_manifest.xml
 LOCAL_MODULE_STEM   := manifest.xml
 LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT)
+LOCAL_MODULE_PATH   := $(TARGET_OUT)/etc/vintf
 
 GEN := $(local-generated-sources-dir)/manifest.xml
 
@@ -89,9 +99,13 @@
 endif
 endif
 
+$(GEN): PRIVATE_VINTF_VNDK_VERSION := $(VINTF_VNDK_VERSION)
 $(GEN): PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES := $(FRAMEWORK_MANIFEST_INPUT_FILES)
 $(GEN): $(FRAMEWORK_MANIFEST_INPUT_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) $(HOST_OUT_EXECUTABLES)/assemble_vintf \
+	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
+	PROVIDED_VNDK_VERSIONS="$(PRIVATE_VINTF_VNDK_VERSION) $(PRODUCT_EXTRA_VNDK_VERSIONS)" \
+	PLATFORM_SYSTEMSDK_VERSIONS="$(PLATFORM_SYSTEMSDK_VERSIONS)" \
+		$(HOST_OUT_EXECUTABLES)/assemble_vintf \
 		-i $(call normalize-path-list,$(PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES)) \
 		-o $@ $(PRIVATE_FLAGS)
 
@@ -99,60 +113,4 @@
 include $(BUILD_PREBUILT)
 BUILT_SYSTEM_MANIFEST := $(LOCAL_BUILT_MODULE)
 
-# Framework Compatibility Matrix
-include $(CLEAR_VARS)
-LOCAL_MODULE        := framework_compatibility_matrix.xml
-LOCAL_MODULE_STEM   := compatibility_matrix.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT)
-
-GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
-
-$(GEN): PRIVATE_FLAGS :=
-
-ifdef BUILT_VENDOR_MANIFEST
-$(GEN): $(BUILT_VENDOR_MANIFEST)
-$(GEN): PRIVATE_FLAGS += -c "$(BUILT_VENDOR_MANIFEST)"
-endif
-
-ifeq (true,$(BOARD_AVB_ENABLE))
-$(GEN): $(AVBTOOL)
-# INTERNAL_AVB_SYSTEM_SIGNING_ARGS consists of BOARD_AVB_SYSTEM_KEY_PATH and
-# BOARD_AVB_SYSTEM_ALGORITHM. We should add the dependency of key path, which
-# is a file, here.
-$(GEN): $(BOARD_AVB_SYSTEM_KEY_PATH)
-# Use deferred assignment (=) instead of immediate assignment (:=).
-# Otherwise, cannot get INTERNAL_AVB_SYSTEM_SIGNING_ARGS.
-FRAMEWORK_VBMETA_VERSION = $$("$(AVBTOOL)" add_hashtree_footer \
-                              --print_required_libavb_version \
-                              $(INTERNAL_AVB_SYSTEM_SIGNING_ARGS) \
-                              $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS))
-else
-FRAMEWORK_VBMETA_VERSION := 0.0
-endif
-
-# All kernel versions that the system image works with.
-KERNEL_VERSIONS := 3.18 4.4 4.9
-KERNEL_CONFIG_DATA := kernel/configs
-
-$(GEN): $(foreach version,$(KERNEL_VERSIONS),\
-	$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg))
-$(GEN): PRIVATE_FLAGS += $(foreach version,$(KERNEL_VERSIONS),\
-	--kernel=$(version):$(call normalize-path-list,\
-		$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg)))
-
-KERNEL_VERSIONS :=
-KERNEL_CONFIG_DATA :=
-
-$(GEN): $(FRAMEWORK_COMPATIBILITY_MATRIX_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37405869) (b/37715375) inject avb versions as well for devices that have avb enabled.
-	POLICYVERS=$(POLICYVERS) \
-		BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
-		FRAMEWORK_VBMETA_VERSION=$(FRAMEWORK_VBMETA_VERSION) \
-		PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
-		$(HOST_OUT_EXECUTABLES)/assemble_vintf \
-		-i $(call normalize-path-list,$(FRAMEWORK_COMPATIBILITY_MATRIX_FILES)) \
-		-o $@ $(PRIVATE_FLAGS)
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_SYSTEM_COMPATIBILITY_MATRIX := $(LOCAL_BUILT_MODULE)
+VINTF_VNDK_VERSION :=
diff --git a/target/board/generic/sepolicy/genfs_contexts b/target/board/generic/sepolicy/genfs_contexts
index bdcead1..3b077a6 100644
--- a/target/board/generic/sepolicy/genfs_contexts
+++ b/target/board/generic/sepolicy/genfs_contexts
@@ -2,3 +2,7 @@
 # /sys/bus/platform/devices/ANDR0001:00/properties/android/ which is a symlink to
 # /sys/devices/platform/ANDR0001:00/properties/android/
 genfscon sysfs /devices/platform/ANDR0001:00/properties/android u:object_r:sysfs_dt_firmware_android:s0
+
+# We expect /sys/class/power_supply/* and everything it links to to be labeled
+# as sysfs_batteryinfo.
+genfscon sysfs /devices/platform/GFSH0001:00/power_supply u:object_r:sysfs_batteryinfo:s0
diff --git a/target/board/generic/sepolicy/healthd.te b/target/board/generic/sepolicy/healthd.te
new file mode 100644
index 0000000..ced6704
--- /dev/null
+++ b/target/board/generic/sepolicy/healthd.te
@@ -0,0 +1,2 @@
+# Allow healthd to read the /sys/class/power_supply directory
+allow healthd sysfs:dir r_dir_perms;
diff --git a/target/board/generic_arm64_a/BoardConfig.mk b/target/board/generic_arm64_a/BoardConfig.mk
index 8f4043f..34a8ac0 100644
--- a/target/board/generic_arm64_a/BoardConfig.mk
+++ b/target/board/generic_arm64_a/BoardConfig.mk
@@ -23,7 +23,7 @@
 TARGET_CPU_VARIANT := generic
 
 TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_CPU_VARIANT := cortex-a15
+TARGET_2ND_CPU_VARIANT := generic
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index e0d7372..00afee6 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -23,10 +23,10 @@
 TARGET_CPU_VARIANT := generic
 
 TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_CPU_VARIANT := cortex-a15
+TARGET_2ND_CPU_VARIANT := generic
 
 # Enable A/B update
 TARGET_NO_RECOVERY := true
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index f8fb88f..a73a31b 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -11,6 +11,10 @@
 TARGET_ARCH_VARIANT := x86
 TARGET_PRELINK_MODULE := false
 
+# The emulator now uses a 64-bit kernel to run the 32-bit x86 image.
+TARGET_USES_64_BIT_BINDER := true
+
 # The IA emulator (qemu) uses the Goldfish devices
 HAVE_HTC_AUDIO_DRIVER := true
 BOARD_USES_GENERIC_AUDIO := true
diff --git a/target/board/treble_common.mk b/target/board/treble_common.mk
index b4777b6..a8c9bc5 100644
--- a/target/board/treble_common.mk
+++ b/target/board/treble_common.mk
@@ -36,18 +36,6 @@
 # Generic AOSP image always requires separate vendor.img
 TARGET_COPY_OUT_VENDOR := vendor
 
-# Enable dex pre-opt to speed up initial boot
-ifeq ($(HOST_OS),linux)
-  ifeq ($(WITH_DEXPREOPT),)
-    WITH_DEXPREOPT := true
-    WITH_DEXPREOPT_PIC := true
-    ifneq ($(TARGET_BUILD_VARIANT),user)
-      # Retain classes.dex in APK's for non-user builds
-      DEX_PREOPT_DEFAULT := nostripping
-    endif
-  endif
-endif
-
 # Generic AOSP image does NOT support HWC1
 TARGET_USES_HWC2 := true
 # Set emulator framebuffer display device buffer count to 3
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 03203ce..811c330 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
 
 include $(SRC_TARGET_DIR)/product/full_x86.mk
 
diff --git a/target/product/base.mk b/target/product/base.mk
index c3eb3b2..750d3fa 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -114,6 +114,7 @@
     mtpd \
     ndc \
     netd \
+    perfetto \
     ping \
     ping6 \
     platform.xml \
@@ -132,6 +133,8 @@
     svc \
     tc \
     telecom \
+    traced \
+    traced_probes \
     vdc \
     vold \
     wm
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index b252349..16599cb 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -85,6 +85,7 @@
     telephony-common \
     uiautomator \
     uncrypt \
+    vndk_snapshot_package \
     voip-common \
     webview \
     webview_zygote \
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 20f0ebf..18eeb40 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -20,11 +20,13 @@
 PRODUCT_PACKAGES += \
     adb \
     adbd \
+    usbd \
     android.hardware.configstore@1.0-service \
     android.hidl.allocator@1.0-service \
     android.hidl.memory@1.0-impl \
     android.hidl.memory@1.0-impl.vendor \
     atrace \
+    blank_screen \
     bootanimation \
     bootstat \
     charger \
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 1e82773..b9820d3 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
index 5880bf8..e9a97cd 100644
--- a/target/product/treble_common.mk
+++ b/target/product/treble_common.mk
@@ -70,12 +70,15 @@
 PRODUCT_COPY_FILES += \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
 
-#GSI support for the devices that disable VNDK enforcing
+# Support for devices that do not enforce the VNDK
 PRODUCT_COPY_FILES += \
-    system/core/rootdir/etc/ld.config.txt:system/etc/ld.config.noenforce.txt \
     build/make/target/product/vndk/init.gsi.rc:system/etc/init/init.gsi.rc \
     build/make/target/product/vndk/init.noenforce.rc:system/etc/init/gsi/init.noenforce.rc
 
-#Set current VNDK version for GSI
+# Namespace configuration file for non-enforcing VNDK
+PRODUCT_PACKAGES += \
+    ld.config.noenforce.txt
+
+# Set current VNDK version for GSI
 PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
     ro.gsi.vndk.version=$(PLATFORM_VNDK_VERSION)
diff --git a/target/product/vndk/Android.mk b/target/product/vndk/Android.mk
index a134d02..93aaf37 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/vndk/Android.mk
@@ -77,26 +77,19 @@
 	@chmod a+x $@
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_current
+LOCAL_MODULE := vndk_package
 LOCAL_REQUIRED_MODULES := \
     $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
     $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
     $(LLNDK_LIBRARIES) \
     llndk.libraries.txt \
     vndksp.libraries.txt
-
 include $(BUILD_PHONY_PACKAGE)
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_package
-ifeq (current,$(BOARD_VNDK_VERSION))
+LOCAL_MODULE := vndk_snapshot_package
 LOCAL_REQUIRED_MODULES := \
-    vndk_current
-else
-LOCAL_REQUIRED_MODULES := \
-    vndk_v$(BOARD_VNDK_VERSION)_$(TARGET_ARCH)
-endif
-LOCAL_REQUIRED_MODULES += \
     $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH))
 include $(BUILD_PHONY_PACKAGE)
+
 endif # BOARD_VNDK_VERSION is set
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 9601d88..240e5c9 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -52,7 +52,6 @@
 import shutil
 import subprocess
 import sys
-import tempfile
 import uuid
 import zipfile
 
@@ -75,6 +74,10 @@
 OPTIONS.is_signing = False
 
 
+# Partitions that should have their care_map added to META/care_map.txt.
+PARTITIONS_WITH_CARE_MAP = ('system', 'vendor', 'product')
+
+
 class OutputFile(object):
   def __init__(self, output_zip, input_dir, prefix, name):
     self._output_zip = output_zip
@@ -94,13 +97,10 @@
 
 
 def GetCareMap(which, imgname):
-  """Generate care_map of system (or vendor) partition"""
-
-  assert which in ("system", "vendor")
+  """Generates the care_map for the given partition."""
+  assert which in PARTITIONS_WITH_CARE_MAP
 
   simg = sparse_img.SparseImage(imgname)
-  care_map_list = [which]
-
   care_map_ranges = simg.care_map
   key = which + "_adjusted_partition_size"
   adjusted_blocks = OPTIONS.info_dict.get(key)
@@ -109,8 +109,7 @@
     care_map_ranges = care_map_ranges.intersect(rangelib.RangeSet(
         "0-%d" % (adjusted_blocks,)))
 
-  care_map_list.append(care_map_ranges.to_string_raw())
-  return care_map_list
+  return [which, care_map_ranges.to_string_raw()]
 
 
 def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
@@ -173,6 +172,20 @@
   return img.name
 
 
+def AddProduct(output_zip, prefix="IMAGES/"):
+  """Turn the contents of PRODUCT into a product image and store it in output_zip."""
+
+  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "product.img")
+  if os.path.exists(img.input_name):
+    print("product.img already exists in %s, no need to rebuild..." % (prefix,))
+    return img.input_name
+
+  block_list = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "product.map")
+  CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "product", img,
+              block_list=block_list)
+  return img.name
+
+
 def AddDtbo(output_zip, prefix="IMAGES/"):
   """Adds the DTBO image.
 
@@ -462,6 +475,126 @@
   img.Write()
 
 
+def AddRadioImagesForAbOta(output_zip, ab_partitions):
+  """Adds the radio images needed for A/B OTA to the output file.
+
+  It parses the list of A/B partitions, looks for the missing ones from RADIO/
+  or VENDOR_IMAGES/ dirs, and copies them to IMAGES/ of the output file (or
+  dir).
+
+  It also ensures that, on return, all the listed A/B partitions have their
+  images available under IMAGES/.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    ab_partitions: The list of A/B partitions.
+
+  Raises:
+    AssertionError: If it can't find an image.
+  """
+  for partition in ab_partitions:
+    img_name = partition.strip() + ".img"
+    prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+    if os.path.exists(prebuilt_path):
+      print("%s already exists, no need to overwrite..." % (img_name,))
+      continue
+
+    img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+    if os.path.exists(img_radio_path):
+      if output_zip:
+        common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+      else:
+        shutil.copy(img_radio_path, prebuilt_path)
+      continue
+
+    # Walk through VENDOR_IMAGES/ since files could be under subdirs.
+    img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
+    for root, _, files in os.walk(img_vendor_dir):
+      if img_name in files:
+        if output_zip:
+          common.ZipWrite(output_zip, os.path.join(root, img_name),
+                          "IMAGES/" + img_name)
+        else:
+          shutil.copy(os.path.join(root, img_name), prebuilt_path)
+        break
+
+    # Assert that the image is present under IMAGES/ now.
+    if output_zip:
+      # Zip spec says: All slashes MUST be forward slashes.
+      img_path = 'IMAGES/' + img_name
+      assert img_path in output_zip.namelist(), "cannot find " + img_name
+    else:
+      img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+      assert os.path.exists(img_path), "cannot find " + img_name
+
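For reference, META/ab_partitions.txt is a plain list of partition names, one per
line; with hypothetical contents it might read:

    boot
    system
    vendor
    product

Each listed partition must end up with a matching .img under IMAGES/.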
+
+def AddCareMapTxtForAbOta(output_zip, ab_partitions, image_paths):
+  """Generates and adds care_map.txt for system and vendor partitions.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    ab_partitions: The list of A/B partitions.
+    image_paths: A map from the partition name to the image path.
+  """
+  care_map_list = []
+  for partition in ab_partitions:
+    partition = partition.strip()
+    if partition not in PARTITIONS_WITH_CARE_MAP:
+      continue
+
+    verity_block_device = "{}_verity_block_device".format(partition)
+    avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
+    if (verity_block_device in OPTIONS.info_dict or
+        OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
+      image_path = image_paths[partition]
+      assert os.path.exists(image_path)
+      care_map_list += GetCareMap(partition, image_path)
+
+  if care_map_list:
+    care_map_path = "META/care_map.txt"
+    if output_zip and care_map_path not in output_zip.namelist():
+      common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
+    else:
+      with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
+        fp.write('\n'.join(care_map_list))
+      if output_zip:
+        OPTIONS.replace_updated_files_list.append(care_map_path)
+
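For reference, the care_map.txt written here is the flattened output of GetCareMap():
alternating partition names and raw block ranges. With hypothetical ranges it would
look like:

    system
    0-32767
    vendor
    0-8191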
+
+def AddPackRadioImages(output_zip, images):
+  """Copies images listed in META/pack_radioimages.txt from RADIO/ to IMAGES/.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    images: A list of image names.
+
+  Raises:
+    AssertionError: If a listed image can't be found.
+  """
+  for image in images:
+    img_name = image.strip()
+    _, ext = os.path.splitext(img_name)
+    if not ext:
+      img_name += ".img"
+
+    prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+    if os.path.exists(prebuilt_path):
+      print("%s already exists, no need to overwrite..." % (img_name,))
+      continue
+
+    img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+    assert os.path.exists(img_radio_path), \
+        "Failed to find %s at %s" % (img_name, img_radio_path)
+
+    if output_zip:
+      common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+    else:
+      shutil.copy(img_radio_path, prebuilt_path)
+
+
 def ReplaceUpdatedFiles(zip_filename, files_list):
   """Updates all the ZIP entries listed in files_list.
 
@@ -502,13 +635,16 @@
       print("target_files appears to already contain images.")
       sys.exit(1)
 
-  # vendor.img is unlike system.img or system_other.img. Because it could be
-  # built from source, or dropped into target_files.zip as a prebuilt blob. We
-  # consider either of them as vendor.img being available, which could be used
-  # when generating vbmeta.img for AVB.
+  # {vendor,product}.img is unlike system.img or system_other.img, because it may
+  # be built from source or dropped into target_files.zip as a prebuilt blob. We
+  # consider either case as {vendor,product}.img being available, which can then
+  # be used when generating vbmeta.img for AVB.
   has_vendor = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "VENDOR")) or
                 os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
                                             "vendor.img")))
+  has_product = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "PRODUCT")) or
+                 os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
+                                             "product.img")))
   has_system_other = os.path.isdir(os.path.join(OPTIONS.input_tmp,
                                                 "SYSTEM_OTHER"))
 
@@ -589,12 +725,16 @@
           recovery_two_step_image.AddToZip(output_zip)
 
   banner("system")
-  partitions['system'] = system_img_path = AddSystem(
+  partitions['system'] = AddSystem(
       output_zip, recovery_img=recovery_image, boot_img=boot_image)
 
   if has_vendor:
     banner("vendor")
-    partitions['vendor'] = vendor_img_path = AddVendor(output_zip)
+    partitions['vendor'] = AddVendor(output_zip)
+
+  if has_product:
+    banner("product")
+    partitions['product'] = AddProduct(output_zip)
 
   if has_system_other:
     banner("system_other")
@@ -618,95 +758,28 @@
     banner("vbmeta")
     AddVBMeta(output_zip, partitions)
 
-  # For devices using A/B update, copy over images from RADIO/ and/or
-  # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
-  # images ready under IMAGES/. All images should have '.img' as extension.
   banner("radio")
-  ab_partitions = os.path.join(OPTIONS.input_tmp, "META", "ab_partitions.txt")
-  if os.path.exists(ab_partitions):
-    with open(ab_partitions, 'r') as f:
-      lines = f.readlines()
-    # For devices using A/B update, generate care_map for system and vendor
-    # partitions (if present), then write this file to target_files package.
-    care_map_list = []
-    for line in lines:
-      if line.strip() == "system" and (
-          "system_verity_block_device" in OPTIONS.info_dict or
-          OPTIONS.info_dict.get("avb_system_hashtree_enable") == "true"):
-        assert os.path.exists(system_img_path)
-        care_map_list += GetCareMap("system", system_img_path)
-      if line.strip() == "vendor" and (
-          "vendor_verity_block_device" in OPTIONS.info_dict or
-          OPTIONS.info_dict.get("avb_vendor_hashtree_enable") == "true"):
-        assert os.path.exists(vendor_img_path)
-        care_map_list += GetCareMap("vendor", vendor_img_path)
+  ab_partitions_txt = os.path.join(OPTIONS.input_tmp, "META",
+                                   "ab_partitions.txt")
+  if os.path.exists(ab_partitions_txt):
+    with open(ab_partitions_txt, 'r') as f:
+      ab_partitions = f.readlines()
 
-      img_name = line.strip() + ".img"
-      prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-      if os.path.exists(prebuilt_path):
-        print("%s already exists, no need to overwrite..." % (img_name,))
-        continue
+    # For devices using A/B update, copy over images from RADIO/ and/or
+    # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
+    # images ready under IMAGES/. All images should have '.img' as extension.
+    AddRadioImagesForAbOta(output_zip, ab_partitions)
 
-      img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
-      if os.path.exists(img_radio_path):
-        if output_zip:
-          common.ZipWrite(output_zip, img_radio_path,
-                          os.path.join("IMAGES", img_name))
-        else:
-          shutil.copy(img_radio_path, prebuilt_path)
-      else:
-        img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
-        for root, _, files in os.walk(img_vendor_dir):
-          if img_name in files:
-            if output_zip:
-              common.ZipWrite(output_zip, os.path.join(root, img_name),
-                              os.path.join("IMAGES", img_name))
-            else:
-              shutil.copy(os.path.join(root, img_name), prebuilt_path)
-            break
-
-      if output_zip:
-        # Zip spec says: All slashes MUST be forward slashes.
-        img_path = 'IMAGES/' + img_name
-        assert img_path in output_zip.namelist(), "cannot find " + img_name
-      else:
-        img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-        assert os.path.exists(img_path), "cannot find " + img_name
-
-    if care_map_list:
-      care_map_path = "META/care_map.txt"
-      if output_zip and care_map_path not in output_zip.namelist():
-        common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
-      else:
-        with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
-          fp.write('\n'.join(care_map_list))
-        if output_zip:
-          OPTIONS.replace_updated_files_list.append(care_map_path)
+    # Generate care_map.txt for system, vendor and product partitions (if
+    # present), then write this file to the target_files package.
+    AddCareMapTxtForAbOta(output_zip, ab_partitions, partitions)
 
   # Radio images that need to be packed into IMAGES/, and product-img.zip.
-  pack_radioimages = os.path.join(
+  pack_radioimages_txt = os.path.join(
       OPTIONS.input_tmp, "META", "pack_radioimages.txt")
-  if os.path.exists(pack_radioimages):
-    with open(pack_radioimages, 'r') as f:
-      lines = f.readlines()
-    for line in lines:
-      img_name = line.strip()
-      _, ext = os.path.splitext(img_name)
-      if not ext:
-        img_name += ".img"
-      prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-      if os.path.exists(prebuilt_path):
-        print("%s already exists, no need to overwrite..." % (img_name,))
-        continue
-
-      img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
-      assert os.path.exists(img_radio_path), \
-          "Failed to find %s at %s" % (img_name, img_radio_path)
-      if output_zip:
-        common.ZipWrite(output_zip, img_radio_path,
-                        os.path.join("IMAGES", img_name))
-      else:
-        shutil.copy(img_radio_path, prebuilt_path)
+  if os.path.exists(pack_radioimages_txt):
+    with open(pack_radioimages_txt, 'r') as f:
+      AddPackRadioImages(output_zip, f.readlines())
 
   if output_zip:
     common.ZipClose(output_zip)
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 69750b2..f366853 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -1385,8 +1385,8 @@
       assert patch_start == patch_size
       return split_info_list
 
-    def AddSplitTransferForLargeApks():
-      """Create split transfers for large apk files.
+    def SplitLargeApks():
+      """Split the large apks files.
 
       Example: Chrome.apk will be split into
         src-0: Chrome.apk-0, tgt-0: Chrome.apk-0
@@ -1452,22 +1452,22 @@
 
           split_src_name = "{}-{}".format(src_name, index)
           split_tgt_name = "{}-{}".format(tgt_name, index)
-          transfer_split = Transfer(split_tgt_name, split_src_name,
-                                    split_tgt_ranges, split_src_ranges,
-                                    self.tgt.RangeSha1(split_tgt_ranges),
-                                    self.src.RangeSha1(split_src_ranges),
-                                    "diff", self.transfers)
-          transfer_split.patch = patch_content
+          split_large_apks.append((split_tgt_name,
+                                   split_src_name,
+                                   split_tgt_ranges,
+                                   split_src_ranges,
+                                   patch_content))
 
     print("Finding transfers...")
 
     large_apks = []
+    split_large_apks = []
     cache_size = common.OPTIONS.cache_size
     split_threshold = 0.125
     max_blocks_per_transfer = int(cache_size * split_threshold /
                                   self.tgt.blocksize)
     empty = RangeSet()
-    for tgt_fn, tgt_ranges in self.tgt.file_map.items():
+    for tgt_fn, tgt_ranges in sorted(self.tgt.file_map.items()):
       if tgt_fn == "__ZERO":
         # the special "__ZERO" domain is all the blocks not contained
         # in any file and that are filled with zeros.  We have a
@@ -1511,13 +1511,23 @@
       AddTransfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
 
     transfer_lock = threading.Lock()
-    threads = [threading.Thread(target=AddSplitTransferForLargeApks)
+    threads = [threading.Thread(target=SplitLargeApks)
                for _ in range(self.threads)]
     for th in threads:
       th.start()
     while threads:
       threads.pop().join()
 
+    # Sort the split transfers for large APKs to generate a deterministic package.
+    split_large_apks.sort()
+    for (tgt_name, src_name, tgt_ranges, src_ranges,
+         patch) in split_large_apks:
+      transfer_split = Transfer(tgt_name, src_name, tgt_ranges, src_ranges,
+                                self.tgt.RangeSha1(tgt_ranges),
+                                self.src.RangeSha1(src_ranges),
+                                "diff", self.transfers)
+      transfer_split.patch = patch
+
   def AbbreviateSourceNames(self):
     for k in self.src.file_map.keys():
       b = os.path.basename(k)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index ed60188..123ec7c 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -325,8 +325,10 @@
     else:
       return True, unsparse_image_path
   inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
-  (_, exit_code) = RunCommand(inflate_command)
+  (inflate_output, exit_code) = RunCommand(inflate_command)
   if exit_code != 0:
+    print("Error: '%s' failed with exit code %d:\n%s" % (
+        inflate_command, exit_code, inflate_output))
     os.remove(unsparse_image_path)
     return False, None
   return True, unsparse_image_path
@@ -553,6 +555,8 @@
         build_command.extend(["-U", prop_dict["uuid"]])
       if "hash_seed" in prop_dict:
         build_command.extend(["-S", prop_dict["hash_seed"]])
+    if "ext4_share_dup_blocks" in prop_dict:
+      build_command.append("-c")
     if "selinux_fc" in prop_dict:
       build_command.append(prop_dict["selinux_fc"])
   elif fs_type.startswith("squash"):
@@ -569,12 +573,12 @@
       build_command.extend(["-c", prop_dict["selinux_fc"]])
     if "block_list" in prop_dict:
       build_command.extend(["-B", prop_dict["block_list"]])
+    if "squashfs_block_size" in prop_dict:
+      build_command.extend(["-b", prop_dict["squashfs_block_size"]])
     if "squashfs_compressor" in prop_dict:
       build_command.extend(["-z", prop_dict["squashfs_compressor"]])
     if "squashfs_compressor_opt" in prop_dict:
       build_command.extend(["-zo", prop_dict["squashfs_compressor_opt"]])
-    if "squashfs_block_size" in prop_dict:
-      build_command.extend(["-b", prop_dict["squashfs_block_size"]])
     if prop_dict.get("squashfs_disable_4k_align") == "true":
       build_command.extend(["-a"])
   elif fs_type.startswith("f2fs"):
@@ -607,7 +611,8 @@
 
   (mkfs_output, exit_code) = RunCommand(build_command)
   if exit_code != 0:
-    print("Error: '%s' failed with exit code %d" % (build_command, exit_code))
+    print("Error: '%s' failed with exit code %d:\n%s" % (
+        build_command, exit_code, mkfs_output))
     return False
 
   # Check if there's enough headroom space available for ext4 image.
@@ -654,13 +659,13 @@
 
     # Run e2fsck on the inflated image file
     e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
-    (_, exit_code) = RunCommand(e2fsck_command)
+    (e2fsck_output, exit_code) = RunCommand(e2fsck_command)
 
     os.remove(unsparse_image)
 
     if exit_code != 0:
-      print("Error: '%s' failed with exit code %d" % (e2fsck_command,
-                                                      exit_code))
+      print("Error: '%s' failed with exit code %d:\n%s" % (
+          e2fsck_command, exit_code, e2fsck_output))
       return False
 
   return True
@@ -720,6 +725,7 @@
     copy_prop("system_root_image", "system_root_image")
     copy_prop("ramdisk_dir", "ramdisk_dir")
     copy_prop("ramdisk_fs_config", "ramdisk_fs_config")
+    copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
     copy_prop("system_squashfs_compressor", "squashfs_compressor")
     copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("system_squashfs_block_size", "squashfs_block_size")
@@ -765,12 +771,29 @@
     copy_prop("vendor_size", "partition_size")
     copy_prop("vendor_journal_size", "journal_size")
     copy_prop("vendor_verity_block_device", "verity_block_device")
+    copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
     copy_prop("vendor_squashfs_compressor", "squashfs_compressor")
     copy_prop("vendor_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("vendor_squashfs_block_size", "squashfs_block_size")
     copy_prop("vendor_squashfs_disable_4k_align", "squashfs_disable_4k_align")
     copy_prop("vendor_base_fs_file", "base_fs_file")
     copy_prop("vendor_extfs_inode_count", "extfs_inode_count")
+  elif mount_point == "product":
+    copy_prop("avb_product_hashtree_enable", "avb_hashtree_enable")
+    copy_prop("avb_product_add_hashtree_footer_args",
+              "avb_add_hashtree_footer_args")
+    copy_prop("avb_product_key_path", "avb_key_path")
+    copy_prop("avb_product_algorithm", "avb_algorithm")
+    copy_prop("product_fs_type", "fs_type")
+    copy_prop("product_size", "partition_size")
+    copy_prop("product_journal_size", "journal_size")
+    copy_prop("product_verity_block_device", "verity_block_device")
+    copy_prop("product_squashfs_compressor", "squashfs_compressor")
+    copy_prop("product_squashfs_compressor_opt", "squashfs_compressor_opt")
+    copy_prop("product_squashfs_block_size", "squashfs_block_size")
+    copy_prop("product_squashfs_disable_4k_align", "squashfs_disable_4k_align")
+    copy_prop("product_base_fs_file", "base_fs_file")
+    copy_prop("product_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "oem":
     copy_prop("fs_type", "fs_type")
     copy_prop("oem_size", "partition_size")
@@ -824,6 +847,8 @@
       mount_point = "vendor"
     elif image_filename == "oem.img":
       mount_point = "oem"
+    elif image_filename == "product.img":
+      mount_point = "product"
     else:
       print("error: unknown image file name ", image_filename, file=sys.stderr)
       sys.exit(1)
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index c4877e0..db63fd3 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -53,11 +53,13 @@
 
 import common
 
-# Work around a bug in python's zipfile module that prevents opening
-# of zipfiles if any entry has an extra field of between 1 and 3 bytes
-# (which is common with zipaligned APKs).  This overrides the
-# ZipInfo._decodeExtra() method (which contains the bug) with an empty
-# version (since we don't need to decode the extra field anyway).
+# Work around a bug in Python's zipfile module that prevents opening of zipfiles
+# if any entry has an extra field of between 1 and 3 bytes (which is common with
+# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
+# contains the bug) with an empty version (since we don't need to decode the
+# extra field anyway).
+# Issue #14315: https://bugs.python.org/issue14315, fixed in Python 2.7.8 and
+# Python 3.5.0 alpha 1.
 class MyZipInfo(zipfile.ZipInfo):
   def _decodeExtra(self):
     pass
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index ebebd63..632cc11 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -78,7 +78,7 @@
 
 
 # The partitions allowed to be signed by AVB (Android verified boot 2.0).
-AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'dtbo')
+AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'product', 'dtbo')
 
 
 class ErrorCode(object):
@@ -1385,7 +1385,7 @@
           p.kill()
           th.join()
 
-      if err or p.returncode != 0:
+      if p.returncode != 0:
         print("WARNING: failure running %s:\n%s\n" % (
             diff_program, "".join(err)))
         self.patch = None
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 1a4383c..95b7303 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -165,7 +165,6 @@
 OPTIONS.updater_binary = None
 OPTIONS.oem_source = None
 OPTIONS.oem_no_mount = False
-OPTIONS.fallback_to_full = True
 OPTIONS.full_radio = False
 OPTIONS.full_bootloader = False
 # Stash size cannot exceed cache_size * threshold.
@@ -311,6 +310,56 @@
       script.AssertOemProperty(prop, values, oem_no_mount)
 
 
+class PayloadSigner(object):
+  """A class that wraps the payload signing works.
+
+  When generating a Payload, hashes of the payload and metadata files will be
+  signed with the device key, either by calling an external payload signer or
+  by calling openssl with the package key. This class provides a unified
+  interface, so that callers can just call PayloadSigner.Sign().
+
+  If an external payload signer has been specified (OPTIONS.payload_signer), it
+  calls the signer with the provided args (OPTIONS.payload_signer_args). Note
+  that the signing key should be provided as part of the payload_signer_args.
+  Otherwise, without an external signer, it uses the package key
+  (OPTIONS.package_key) and calls openssl for the signing work.
+  """
+
+  def __init__(self):
+    if OPTIONS.payload_signer is None:
+      # Prepare the payload signing key.
+      private_key = OPTIONS.package_key + OPTIONS.private_key_suffix
+      pw = OPTIONS.key_passwords[OPTIONS.package_key]
+
+      cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
+      cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
+      signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
+      cmd.extend(["-out", signing_key])
+
+      get_signing_key = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
+                                   stderr=subprocess.STDOUT)
+      stdoutdata, _ = get_signing_key.communicate()
+      assert get_signing_key.returncode == 0, \
+          "Failed to get signing key: {}".format(stdoutdata)
+
+      self.signer = "openssl"
+      self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
+                          "-pkeyopt", "digest:sha256"]
+    else:
+      self.signer = OPTIONS.payload_signer
+      self.signer_args = OPTIONS.payload_signer_args
+
+  def Sign(self, in_file):
+    """Signs the given input file. Returns the output filename."""
+    out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
+    cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
+    signing = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = signing.communicate()
+    assert signing.returncode == 0, \
+        "Failed to sign the input file: {}".format(stdoutdata)
+    return out_file
+
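A minimal usage sketch (mirroring how step 3 below consumes the class; the file
names are placeholders):

    signer = PayloadSigner()
    signed_payload_sig = signer.Sign(payload_sig_file)    # signed payload hash
    signed_metadata_sig = signer.Sign(metadata_sig_file)  # signed metadata hash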
+
 def SignOutput(temp_zip_name, output_zip_name):
   pw = OPTIONS.key_passwords[OPTIONS.package_key]
 
@@ -528,12 +577,7 @@
   if target_info.oem_props and not OPTIONS.oem_no_mount:
     target_info.WriteMountOemScript(script)
 
-  metadata = {
-      "post-build": target_info.fingerprint,
-      "pre-device": target_info.device,
-      "post-timestamp": target_info.GetBuildProp("ro.build.date.utc"),
-      "ota-type" : "BLOCK",
-  }
+  metadata = GetPackageMetadata(target_info)
 
   device_specific = common.DeviceSpecificParams(
       input_zip=input_zip,
@@ -683,23 +727,6 @@
                      compress_type=zipfile.ZIP_STORED)
 
 
-def GetBuildProp(prop, info_dict):
-  """Returns the inquired build property from a given info_dict."""
-  try:
-    return info_dict.get("build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
-
-
-def GetVendorBuildProp(prop, info_dict):
-  """Returns the inquired vendor build property from a given info_dict."""
-  try:
-    return info_dict.get("vendor.build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError(
-        "couldn't find %s in vendor.build.prop" % (prop,))
-
-
 def HandleDowngradeMetadata(metadata, target_info, source_info):
   # Only incremental OTAs are allowed to reach here.
   assert OPTIONS.incremental_source is not None
@@ -729,6 +756,57 @@
     metadata["post-timestamp"] = post_timestamp
 
 
+def GetPackageMetadata(target_info, source_info=None):
+  """Generates and returns the metadata dict.
+
+  It generates a dict() that contains the info to be written into an OTA
+  package (META-INF/com/android/metadata). It also handles the detection of
+  downgrade / timestamp override / data wipe based on the global options.
+
+  Args:
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, or
+        None if generating full OTA.
+
+  Returns:
+    A dict to be written into package metadata entry.
+  """
+  assert isinstance(target_info, BuildInfo)
+  assert source_info is None or isinstance(source_info, BuildInfo)
+
+  metadata = {
+      'post-build' : target_info.fingerprint,
+      'post-build-incremental' : target_info.GetBuildProp(
+          'ro.build.version.incremental'),
+  }
+
+  if target_info.is_ab:
+    metadata['ota-type'] = 'AB'
+    metadata['ota-required-cache'] = '0'
+  else:
+    metadata['ota-type'] = 'BLOCK'
+
+  if OPTIONS.wipe_user_data:
+    metadata['ota-wipe'] = 'yes'
+
+  is_incremental = source_info is not None
+  if is_incremental:
+    metadata['pre-build'] = source_info.fingerprint
+    metadata['pre-build-incremental'] = source_info.GetBuildProp(
+        'ro.build.version.incremental')
+    metadata['pre-device'] = source_info.device
+  else:
+    metadata['pre-device'] = target_info.device
+
+  # Detect downgrades, or fill in the post-timestamp.
+  if is_incremental:
+    HandleDowngradeMetadata(metadata, target_info, source_info)
+  else:
+    metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+
+  return metadata
+
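For a full (non-incremental) A/B package, GetPackageMetadata() would return something
like the following (all values are illustrative):

    {
        'post-build': 'Android/aosp_arm64/generic_arm64:9/PI/4567890:userdebug/test-keys',
        'post-build-incremental': '4567890',
        'ota-type': 'AB',
        'ota-required-cache': '0',
        'pre-device': 'generic_arm64',
        'post-timestamp': '1514764800',
    }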
+
 def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
   target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
   source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
@@ -746,12 +824,7 @@
     if not OPTIONS.oem_no_mount:
       source_info.WriteMountOemScript(script)
 
-  metadata = {
-      "pre-device": source_info.device,
-      "ota-type": "BLOCK",
-  }
-
-  HandleDowngradeMetadata(metadata, target_info, source_info)
+  metadata = GetPackageMetadata(target_info, source_info)
 
   device_specific = common.DeviceSpecificParams(
       source_zip=source_zip,
@@ -763,13 +836,6 @@
       metadata=metadata,
       info_dict=source_info)
 
-  metadata["pre-build"] = source_info.fingerprint
-  metadata["post-build"] = target_info.fingerprint
-  metadata["pre-build-incremental"] = source_info.GetBuildProp(
-      "ro.build.version.incremental")
-  metadata["post-build-incremental"] = target_info.GetBuildProp(
-      "ro.build.version.incremental")
-
   source_boot = common.GetBootableImage(
       "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
   target_boot = common.GetBootableImage(
@@ -1060,20 +1126,8 @@
   # The place where the output from the subprocess should go.
   log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
 
-  # A/B updater expects a signing key in RSA format. Gets the key ready for
-  # later use in step 3, unless a payload_signer has been specified.
-  if OPTIONS.payload_signer is None:
-    cmd = ["openssl", "pkcs8",
-           "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
-           "-inform", "DER"]
-    pw = OPTIONS.key_passwords[OPTIONS.package_key]
-    cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
-    rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
-    cmd.extend(["-out", rsa_key])
-    p1 = common.Run(cmd, verbose=False, stdout=log_file,
-                    stderr=subprocess.STDOUT)
-    p1.communicate()
-    assert p1.returncode == 0, "openssl pkcs8 failed"
+  # Get the PayloadSigner to be used in step 3.
+  payload_signer = PayloadSigner()
 
   # Stage the output zip package for package signing.
   temp_zip_file = tempfile.NamedTemporaryFile()
@@ -1088,24 +1142,7 @@
     source_info = None
 
   # Metadata to comply with Android OTA package format.
-  metadata = {
-      "post-build" : target_info.fingerprint,
-      "post-build-incremental" : target_info.GetBuildProp(
-          "ro.build.version.incremental"),
-      "ota-required-cache" : "0",
-      "ota-type" : "AB",
-  }
-
-  if source_file is not None:
-    metadata["pre-device"] = source_info.device
-    metadata["pre-build"] = source_info.fingerprint
-    metadata["pre-build-incremental"] = source_info.GetBuildProp(
-        "ro.build.version.incremental")
-
-    HandleDowngradeMetadata(metadata, target_info, source_info)
-  else:
-    metadata["pre-device"] = target_info.device
-    metadata["post-timestamp"] = target_info.GetBuildProp("ro.build.date.utc")
+  metadata = GetPackageMetadata(target_info, source_info)
 
   # 1. Generate payload.
   payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
@@ -1131,37 +1168,11 @@
   assert p1.returncode == 0, "brillo_update_payload hash failed"
 
   # 3. Sign the hashes and insert them back into the payload file.
-  signed_payload_sig_file = common.MakeTempFile(prefix="signed-sig-",
-                                                suffix=".bin")
-  signed_metadata_sig_file = common.MakeTempFile(prefix="signed-sig-",
-                                                 suffix=".bin")
   # 3a. Sign the payload hash.
-  if OPTIONS.payload_signer is not None:
-    cmd = [OPTIONS.payload_signer]
-    cmd.extend(OPTIONS.payload_signer_args)
-  else:
-    cmd = ["openssl", "pkeyutl", "-sign",
-           "-inkey", rsa_key,
-           "-pkeyopt", "digest:sha256"]
-  cmd.extend(["-in", payload_sig_file,
-              "-out", signed_payload_sig_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "openssl sign payload failed"
+  signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
 
   # 3b. Sign the metadata hash.
-  if OPTIONS.payload_signer is not None:
-    cmd = [OPTIONS.payload_signer]
-    cmd.extend(OPTIONS.payload_signer_args)
-  else:
-    cmd = ["openssl", "pkeyutl", "-sign",
-           "-inkey", rsa_key,
-           "-pkeyopt", "digest:sha256"]
-  cmd.extend(["-in", metadata_sig_file,
-              "-out", signed_metadata_sig_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "openssl sign metadata failed"
+  signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
 
   # 3c. Insert the signatures back into the payload file.
   signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
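
The PayloadSigner class is likewise defined outside the hunks shown here. The
removed inline code above (the openssl pkcs8 key conversion and the two
openssl pkeyutl signing calls) together with the new PayloadSignerTest below
suggest a wrapper roughly like this sketch; treat the temp-file names and the
exact Run() arguments as assumptions:

    class PayloadSigner(object):
      """Sketch only: encapsulates payload/metadata hash signing."""

      def __init__(self):
        if OPTIONS.payload_signer is None:
          # Convert the package key into the RSA format that openssl expects,
          # mirroring the removed inline 'openssl pkcs8' step.
          pw = OPTIONS.key_passwords[OPTIONS.package_key]
          cmd = ["openssl", "pkcs8",
                 "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
                 "-inform", "DER"]
          cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
          rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
          cmd.extend(["-out", rsa_key])
          p = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
          p.communicate()
          assert p.returncode == 0, "openssl pkcs8 failed"
          self.signer = "openssl"
          self.signer_args = ["pkeyutl", "-sign", "-inkey", rsa_key,
                              "-pkeyopt", "digest:sha256"]
        else:
          self.signer = OPTIONS.payload_signer
          self.signer_args = OPTIONS.payload_signer_args

      def Sign(self, in_file):
        """Signs in_file; returns the path to the signed output file."""
        out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
        cmd = [self.signer] + self.signer_args
        cmd.extend(["-in", in_file, "-out", out_file])
        p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        p.communicate()
        assert p.returncode == 0, "signing failed"
        return out_file
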
@@ -1305,8 +1316,6 @@
       OPTIONS.block_based = True
     elif o in ("-b", "--binary"):
       OPTIONS.updater_binary = a
-    elif o in ("--no_fallback_to_full",):
-      OPTIONS.fallback_to_full = False
     elif o == "--stash_threshold":
       try:
         OPTIONS.stash_threshold = float(a)
@@ -1344,7 +1353,6 @@
                                  "oem_settings=",
                                  "oem_no_mount",
                                  "verify",
-                                 "no_fallback_to_full",
                                  "stash_threshold=",
                                  "log_diff=",
                                  "payload_signer=",
@@ -1370,16 +1378,34 @@
   assert not (OPTIONS.downgrade and OPTIONS.timestamp), \
       "Cannot have --downgrade AND --override_timestamp both"
 
-  # Load the dict file from the zip directly to have a peek at the OTA type.
-  # For packages using A/B update, unzipping is not needed.
+  # Load the build info dicts directly from the zip or from the extracted
+  # input directory. We don't need to unzip the entire target-files zip,
+  # because it won't be needed for A/B OTAs (brillo_update_payload handles
+  # that on its own). When loading the info dicts, we don't need to provide
+  # the second parameter to common.LoadInfoDict(): that parameter replaces
+  # some properties, such as 'selinux_fc' and 'ramdisk_dir', with their
+  # actual paths, which won't be needed during OTA generation.
   if OPTIONS.extracted_input is not None:
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input,
-                                            OPTIONS.extracted_input)
+    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
   else:
-    input_zip = zipfile.ZipFile(args[0], "r")
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-    common.ZipClose(input_zip)
+    with zipfile.ZipFile(args[0], 'r') as input_zip:
+      OPTIONS.info_dict = common.LoadInfoDict(input_zip)
 
+  if OPTIONS.verbose:
+    print("--- target info ---")
+    common.DumpInfoDict(OPTIONS.info_dict)
+
+  # Load the source build dict if applicable.
+  if OPTIONS.incremental_source is not None:
+    OPTIONS.target_info_dict = OPTIONS.info_dict
+    with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+
+    if OPTIONS.verbose:
+      print("--- source info ---")
+      common.DumpInfoDict(OPTIONS.source_info_dict)
+
+  # Load OEM dicts if provided.
   OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
 
   ab_update = OPTIONS.info_dict.get("ab_update") == "true"
@@ -1396,20 +1422,6 @@
     OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
 
   if ab_update:
-    if OPTIONS.incremental_source is not None:
-      OPTIONS.target_info_dict = OPTIONS.info_dict
-      source_zip = zipfile.ZipFile(OPTIONS.incremental_source, "r")
-      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
-      common.ZipClose(source_zip)
-
-    if OPTIONS.verbose:
-      print("--- target info ---")
-      common.DumpInfoDict(OPTIONS.info_dict)
-
-      if OPTIONS.incremental_source is not None:
-        print("--- source info ---")
-        common.DumpInfoDict(OPTIONS.source_info_dict)
-
     WriteABOTAPackageWithBrilloScript(
         target_file=args[0],
         output_file=args[1],
@@ -1418,49 +1430,45 @@
     print("done.")
     return
 
+  # Sanity check the loaded info dicts first.
+  if OPTIONS.info_dict.get("no_recovery") == "true":
+    raise common.ExternalError(
+        "--- target build has specified no recovery ---")
+
+  # Non-A/B OTAs rely on /cache partition to store temporary files.
+  cache_size = OPTIONS.info_dict.get("cache_size")
+  if cache_size is None:
+    print("--- can't determine the cache partition size ---")
+  OPTIONS.cache_size = cache_size
+
   if OPTIONS.extra_script is not None:
     OPTIONS.extra_script = open(OPTIONS.extra_script).read()
 
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp,
-                                            OPTIONS.input_tmp)
     input_zip = zipfile.ZipFile(args[0], "r")
   else:
     print("unzipping target target-files...")
     OPTIONS.input_tmp, input_zip = common.UnzipTemp(
         args[0], UNZIP_PATTERN)
+  OPTIONS.target_tmp = OPTIONS.input_tmp
 
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
-
-  if OPTIONS.verbose:
-    print("--- target info ---")
-    common.DumpInfoDict(OPTIONS.info_dict)
-
-  # If the caller explicitly specified the device-specific extensions
-  # path via -s/--device_specific, use that.  Otherwise, use
-  # META/releasetools.py if it is present in the target target_files.
-  # Otherwise, take the path of the file from 'tool_extensions' in the
-  # info dict and look for that in the local filesystem, relative to
-  # the current directory.
-
+  # If the caller explicitly specified the device-specific extensions path via
+  # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it
+  # is present in the target target_files. Otherwise, take the path of the file
+  # from 'tool_extensions' in the info dict and look for that in the local
+  # filesystem, relative to the current directory.
   if OPTIONS.device_specific is None:
     from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
     if os.path.exists(from_input):
       print("(using device-specific extensions from target_files)")
       OPTIONS.device_specific = from_input
     else:
-      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
 
   if OPTIONS.device_specific is not None:
     OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
 
-  if OPTIONS.info_dict.get("no_recovery") == "true":
-    raise common.ExternalError(
-        "--- target build has specified no recovery ---")
-
   # Set up the output zip. Create a temporary zip file if signing is needed.
   if OPTIONS.no_signing:
     if os.path.exists(args[1]):
@@ -1472,46 +1480,26 @@
     output_zip = zipfile.ZipFile(temp_zip_file, "w",
                                  compression=zipfile.ZIP_DEFLATED)
 
-  # Non A/B OTAs rely on /cache partition to store temporary files.
-  cache_size = OPTIONS.info_dict.get("cache_size", None)
-  if cache_size is None:
-    print("--- can't determine the cache partition size ---")
-  OPTIONS.cache_size = cache_size
-
   # Generate a full OTA.
   if OPTIONS.incremental_source is None:
     WriteFullOTAPackage(input_zip, output_zip)
 
-  # Generate an incremental OTA. It will fall back to generate a full OTA on
-  # failure unless no_fallback_to_full is specified.
+  # Generate an incremental OTA.
   else:
     print("unzipping source target-files...")
     OPTIONS.source_tmp, source_zip = common.UnzipTemp(
         OPTIONS.incremental_source,
         UNZIP_PATTERN)
-    OPTIONS.target_info_dict = OPTIONS.info_dict
-    OPTIONS.source_info_dict = common.LoadInfoDict(source_zip,
-                                                   OPTIONS.source_tmp)
-    if OPTIONS.verbose:
-      print("--- source info ---")
-      common.DumpInfoDict(OPTIONS.source_info_dict)
-    try:
-      WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
-      if OPTIONS.log_diff:
-        out_file = open(OPTIONS.log_diff, 'w')
-        import target_files_diff
-        target_files_diff.recursiveDiff('',
-                                        OPTIONS.source_tmp,
-                                        OPTIONS.input_tmp,
-                                        out_file)
-        out_file.close()
-    except ValueError:
-      if not OPTIONS.fallback_to_full:
-        raise
-      print("--- failed to build incremental; falling back to full ---")
-      OPTIONS.incremental_source = None
-      WriteFullOTAPackage(input_zip, output_zip)
 
+    WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
+
+    if OPTIONS.log_diff:
+      with open(OPTIONS.log_diff, 'w') as out_file:
+        import target_files_diff
+        target_files_diff.recursiveDiff(
+            '', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
+
+  common.ZipClose(input_zip)
   common.ZipClose(output_zip)
 
   # Sign the generated zip package unless no_signing is specified.
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
new file mode 100644
index 0000000..e449ca8
--- /dev/null
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -0,0 +1,168 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import os.path
+import unittest
+import zipfile
+
+import common
+from add_img_to_target_files import AddPackRadioImages, AddRadioImagesForAbOta
+
+
+OPTIONS = common.OPTIONS
+
+
+class AddImagesToTargetFilesTest(unittest.TestCase):
+
+  def setUp(self):
+    OPTIONS.input_tmp = common.MakeTempDir()
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _create_images(images, prefix):
+    """Creates images under OPTIONS.input_tmp/prefix."""
+    path = os.path.join(OPTIONS.input_tmp, prefix)
+    if not os.path.exists(path):
+      os.mkdir(path)
+
+    for image in images:
+      image_path = os.path.join(path, image + '.img')
+      with open(image_path, 'wb') as image_fp:
+        image_fp.write(image.encode())
+
+    images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+    if not os.path.exists(images_path):
+      os.mkdir(images_path)
+    return images, images_path
+
+  def test_AddRadioImagesForAbOta_imageExists(self):
+    """Tests the case with existing images under IMAGES/."""
+    images, images_path = self._create_images(['aboot', 'xbl'], 'IMAGES')
+    AddRadioImagesForAbOta(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddRadioImagesForAbOta_copyFromRadio(self):
+    """Tests the case that copies images from RADIO/."""
+    images, images_path = self._create_images(['aboot', 'xbl'], 'RADIO')
+    AddRadioImagesForAbOta(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddRadioImagesForAbOta_copyFromRadio_zipOutput(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddRadioImagesForAbOta(output_zip, images)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      for image in images:
+        self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+  def test_AddRadioImagesForAbOta_copyFromVendorImages(self):
+    """Tests the case that copies images from VENDOR_IMAGES/."""
+    vendor_images_path = os.path.join(OPTIONS.input_tmp, 'VENDOR_IMAGES')
+    os.mkdir(vendor_images_path)
+
+    partitions = ['aboot', 'xbl']
+    for index, partition in enumerate(partitions):
+      subdir = os.path.join(vendor_images_path, 'subdir-{}'.format(index))
+      os.mkdir(subdir)
+
+      partition_image_path = os.path.join(subdir, partition + '.img')
+      with open(partition_image_path, 'wb') as partition_fp:
+        partition_fp.write(partition.encode())
+
+    # Set up the output dir.
+    images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+    os.mkdir(images_path)
+
+    AddRadioImagesForAbOta(None, partitions)
+
+    for partition in partitions:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, partition + '.img')))
+
+  def test_AddRadioImagesForAbOta_missingImages(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+    self.assertRaises(AssertionError, AddRadioImagesForAbOta, None,
+                      images + ['baz'])
+
+  def test_AddRadioImagesForAbOta_missingImages_zipOutput(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      self.assertRaises(AssertionError, AddRadioImagesForAbOta, output_zip,
+                        images + ['baz'])
+
+  def test_AddPackRadioImages(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+    AddPackRadioImages(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_with_suffix(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+    images_with_suffix = [image + '.img' for image in images]
+    AddPackRadioImages(None, images_with_suffix)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_zipOutput(self):
+    images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddPackRadioImages(output_zip, images)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      for image in images:
+        self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+  def test_AddPackRadioImages_imageExists(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+
+    # Additionally create images under IMAGES/ so that they will be skipped.
+    images, images_path = self._create_images(['foo', 'bar'], 'IMAGES')
+
+    AddPackRadioImages(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_missingImages(self):
+    images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+    AddPackRadioImages(None, images)
+
+    self.assertRaises(AssertionError, AddPackRadioImages, None,
+                      images + ['baz'])
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 0948c61..fa6655b 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -15,11 +15,20 @@
 #
 
 import copy
+import os.path
 import unittest
 
 import common
 from ota_from_target_files import (
-    _LoadOemDicts, BuildInfo, WriteFingerprintAssertion)
+    _LoadOemDicts, BuildInfo, GetPackageMetadata, PayloadSigner,
+    WriteFingerprintAssertion)
+
+
+def get_testdata_dir():
+  """Returns the testdata dir, in relative to the script dir."""
+  # The script dir is the one we want, which could be different from pwd.
+  current_dir = os.path.dirname(os.path.realpath(__file__))
+  return os.path.join(current_dir, 'testdata')
 
 
 class MockScriptWriter(object):
@@ -300,3 +309,258 @@
       self.assertEqual('foo', oem_dict['xyz'])
       self.assertEqual('bar', oem_dict['a.b.c'])
       self.assertEqual('{}'.format(i), oem_dict['ro.build.index'])
+
+
+class OtaFromTargetFilesTest(unittest.TestCase):
+
+  TEST_TARGET_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-target',
+          'ro.build.version.incremental' : 'build-version-incremental-target',
+          'ro.build.date.utc' : '1500000000',
+      },
+  }
+
+  TEST_SOURCE_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-source',
+          'ro.build.version.incremental' : 'build-version-incremental-source',
+          'ro.build.date.utc' : '1400000000',
+      },
+  }
+
+  def setUp(self):
+    # Reset the global options as in ota_from_target_files.py.
+    common.OPTIONS.incremental_source = None
+    common.OPTIONS.downgrade = False
+    common.OPTIONS.timestamp = False
+    common.OPTIONS.wipe_user_data = False
+
+  def test_GetPackageMetadata_abOta_full(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_abOta_incremental(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_full(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_incremental(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_wipe(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    common.OPTIONS.wipe_user_data = True
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  @staticmethod
+  def _test_GetPackageMetadata_swapBuildTimestamps(target_info, source_info):
+    (target_info['build.prop']['ro.build.date.utc'],
+     source_info['build.prop']['ro.build.date.utc']) = (
+         source_info['build.prop']['ro.build.date.utc'],
+         target_info['build.prop']['ro.build.date.utc'])
+
+  def test_GetPackageMetadata_unintentionalDowngradeDetected(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+                      source_info)
+
+  def test_GetPackageMetadata_downgrade(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.downgrade = True
+    common.OPTIONS.wipe_user_data = True
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-downgrade' : 'yes',
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_overrideTimestamp(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.timestamp = True
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000001',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+
+class PayloadSignerTest(unittest.TestCase):
+
+  SIGFILE = 'sigfile.bin'
+  SIGNED_SIGFILE = 'signed-sigfile.bin'
+
+  def setUp(self):
+    self.testdata_dir = get_testdata_dir()
+    self.assertTrue(os.path.exists(self.testdata_dir))
+
+    common.OPTIONS.payload_signer = None
+    common.OPTIONS.payload_signer_args = []
+    common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : None,
+    }
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def _assertFilesEqual(self, file1, file2):
+    with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2:
+      self.assertEqual(fp1.read(), fp2.read())
+
+  def test_init(self):
+    payload_signer = PayloadSigner()
+    self.assertEqual('openssl', payload_signer.signer)
+
+  def test_init_withPassword(self):
+    common.OPTIONS.package_key = os.path.join(
+        self.testdata_dir, 'testkey_with_passwd')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : 'foo',
+    }
+    payload_signer = PayloadSigner()
+    self.assertEqual('openssl', payload_signer.signer)
+
+  def test_init_withExternalSigner(self):
+    common.OPTIONS.payload_signer = 'abc'
+    common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
+    payload_signer = PayloadSigner()
+    self.assertEqual('abc', payload_signer.signer)
+    self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
+
+  def test_Sign(self):
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+  def test_Sign_withExternalSigner_openssl(self):
+    """Uses openssl as the external payload signer."""
+    common.OPTIONS.payload_signer = 'openssl'
+    common.OPTIONS.payload_signer_args = [
+        'pkeyutl', '-sign', '-keyform', 'DER', '-inkey',
+        os.path.join(self.testdata_dir, 'testkey.pk8'),
+        '-pkeyopt', 'digest:sha256']
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+  def test_Sign_withExternalSigner_script(self):
+    """Uses testdata/payload_signer.sh as the external payload signer."""
+    common.OPTIONS.payload_signer = os.path.join(
+        self.testdata_dir, 'payload_signer.sh')
+    common.OPTIONS.payload_signer_args = [
+        os.path.join(self.testdata_dir, 'testkey.pk8')]
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
diff --git a/tools/releasetools/testdata/payload_signer.sh b/tools/releasetools/testdata/payload_signer.sh
new file mode 100755
index 0000000..a44ef34
--- /dev/null
+++ b/tools/releasetools/testdata/payload_signer.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+# The script will be called with 'payload_signer.sh <key> -in <input> -out <output>'.
+openssl pkeyutl -sign -keyform DER -inkey $1 -pkeyopt digest:sha256 -in $3 -out $5
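
With OPTIONS.payload_signer pointing at this script and the .pk8 key passed as
the single extra argument (as in test_Sign_withExternalSigner_script above),
the Sign() call sketched earlier would assemble an argv along these lines; the
file names are placeholders for temp files, not names used by the change:

    # Hypothetical external-signer invocation built by PayloadSigner.Sign():
    cmd = ['testdata/payload_signer.sh', 'testdata/testkey.pk8',
           '-in', 'payload.sig', '-out', 'signed-payload.sig']
    # ...which the one-line script above forwards to:
    #   openssl pkeyutl -sign -keyform DER -inkey testdata/testkey.pk8 \
    #       -pkeyopt digest:sha256 -in payload.sig -out signed-payload.sig
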
diff --git a/tools/releasetools/testdata/sigfile.bin b/tools/releasetools/testdata/sigfile.bin
new file mode 100644
index 0000000..8682216
--- /dev/null
+++ b/tools/releasetools/testdata/sigfile.bin
@@ -0,0 +1 @@
+ºQàÂÜ¢”¡½¨Gpø£Õùù°ÔÖ'[4KéL¡c
\ No newline at end of file
diff --git a/tools/releasetools/testdata/signed-sigfile.bin b/tools/releasetools/testdata/signed-sigfile.bin
new file mode 100644
index 0000000..86d2f9e
--- /dev/null
+++ b/tools/releasetools/testdata/signed-sigfile.bin
@@ -0,0 +1,2 @@
+R¡&‹EÿsÁ%ø?¹|¤œ&Í€ñzbSŠA[ßtqç†WKґl¦àÙÙås¥Ò~Fcæ	`ž¯¾Í#
+T{Ý×Û½F­ÒÁŸxƒø1‰6̋=Q°•ŒVæ^Tß°ØxX£¶/þ#©êI'ÜîtcLp““¬­ŸëovzђRá:õóWþ9(¹Á26Û̬ábÂBP1¸6ãnÒß±QÕC©gh;r‰²O}%Ľõˆáo6ã”d“ê´Éãå2Y`¦ÕÛ¼ª¥_R“OrCa,èI"n(`–ínñÜÐbaiö¹Å¨ÔäS„×Ê)kžO[`6c¬e
\ No newline at end of file
diff --git a/tools/releasetools/testdata/testkey.pk8 b/tools/releasetools/testdata/testkey.pk8
new file mode 100644
index 0000000..99be291
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.pk8
Binary files differ
diff --git a/tools/releasetools/testdata/testkey.x509.pem b/tools/releasetools/testdata/testkey.x509.pem
new file mode 100644
index 0000000..65c8085
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIJAN/FvjYzGNOKMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xODAxMTgwMDM0NTFaFw00NTA2MDUwMDM0NTFaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAL478jti8FoJkDcqu8/sStOHoNLdwC+MtjYa
+QADs1ZxcggKxXBYy0xkAw75G2T+jddjuvncCaDy57Z5vQPlZzyBRUR4NB1FkmxzP
+kJPCYL9v9gFZAFI+Sda/beF/tliNHkcyT9eWY5+vKUChpnMnIq8tIG75mL1y9mVJ
+k5ueg5hHwlAkSGNiBifwnDJxXiLVVNC8SrFeTJbeQTtFb/wleBGoji8Mgp6GblIW
+LaO3R5Tv+O7/x/c4ZCQueDgNXZA9/BD4DuRp34RhUjV0EZiQ016xYHejvkDuMlDV
+/JWD9dDM4plKSLWWtObevDQA6sGJd0+51s77gva+CKmQ8j39tU0CAwEAAaNTMFEw
+HQYDVR0OBBYEFNJPJZDpq6tc/19Z2kxPA2bj9D6UMB8GA1UdIwQYMBaAFNJPJZDp
+q6tc/19Z2kxPA2bj9D6UMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQAD
+ggEBABSUG9qrwV3WcClDJwqkNLN4yeVVYzkRMGA8/XqOiYrW4zh0mKDLfr6OeU1C
+AKwZBLhhql59Po25r4gcwPiTN2DkoCfb3T59XG8J54PAgTQjIAZ3J+mGZplnmuD3
+wj+UGUpPe0qTr33ZPoJfwxVo4RVnOt/UCsIGXch0HS/BIdpechqP0w4rOHUbq6EA
+8UEi5irKSDOU9b/5rD/tX2f4nGwJlKQEHWrsj9LLKlaL7fX36ghoSxN/pBJOhedg
+/VjT6xbaEwfyhC6Zj9av5Xl7UdpYt+rBMroAGenz0OSxKhIphdcx4ZMhvfkBoYG9
+Crupdqe+kUsfg2RlPb5grQ3klMo=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/testkey_with_passwd.pk8 b/tools/releasetools/testdata/testkey_with_passwd.pk8
new file mode 100644
index 0000000..3d567de
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.pk8
Binary files differ
diff --git a/tools/releasetools/testdata/testkey_with_passwd.x509.pem b/tools/releasetools/testdata/testkey_with_passwd.x509.pem
new file mode 100644
index 0000000..449396e
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIJANefUd3Piu0yMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xODAxMTgwMDI3NDRaFw00NTA2MDUwMDI3NDRaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBALBoA4c+qCQKapQAVGclbousC5J/L0TNZJEd
+KSW2nzXUHIwgTQ3r82227xkIvjnqXMCsc0q3/N2gGKR4sHqA30JO9Dyfgsx1ISaR
+GXe5cG048m5U5snplQgvPovtah9ZyvwNPzWPYC3uceJaDxKQKwVdsV+mOWM6WmpQ
+bdLO37jxfytyAbzaz3sG5HA3FSB8rX/xDM6If18NsxSHpcjaOjZXC4Fg6wlp0klY
+5/qhFEdmieu2zQVelXjoJfKSku8tPa7kZeDU/F3uLUq/U/xvFk7NVsRV+QvYOdQK
+1QECc/3yv1TKNAN3huWTgzCX6bMHmi09Npw3MQaGY0oS34cH9x0CAwEAAaNTMFEw
+HQYDVR0OBBYEFNsJZ0n9Opeea0rVAzL+1jwkDKzPMB8GA1UdIwQYMBaAFNsJZ0n9
+Opeea0rVAzL+1jwkDKzPMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQAD
+ggEBAJ/bzIzA+NrYwPEv56XKf6Vuj81+M1rTHAsH9PqbOvJT7iM7aU7wAl6vmXAo
+DQtvKoOBMdIXprapwe0quHCQm7PGxg+RRegr+dcTSVJFv1plnODOBOEAVlEfFwuW
+Cz0USF2jrNq+4ciH5zPL1a31ONb1rMkxJXQ/tAi0x8m6tZz+jsbE0wO6qB80UmkA
+4WY2Tu/gnAvFpD8plkiU0EKwedBHAcaFFZkQp23MKsVZ3UBqsqzzfXDYV1Oa6rIy
+XIZpI2Gx75pvAb57T2ap/yl0DBEAu7Nmpll0GCsgeJVdy7tS4LNj96Quya3CHWQw
+WNTVuan0KZqwDIm4Xn1oHUFQ9vc=
+-----END CERTIFICATE-----
diff --git a/tools/warn.py b/tools/warn.py
index 62feac3..f42fb96 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -1010,12 +1010,7 @@
      'severity': Severity.HIGH,
      'description':
          'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
-     'patterns': [r".*: warning: \[GuardedByChecker\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Invalid @GuardedBy expression',
-     'patterns': [r".*: warning: \[GuardedByValidator\] .+"]},
+     'patterns': [r".*: warning: \[GuardedBy\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':