Merge "DO NOT MERGE - Merge pie-platform-release (PPRL.190105.001) into master"
diff --git a/core/binary.mk b/core/binary.mk
index be10c2d..df0e1a5 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -76,7 +76,7 @@
my_native_coverage := false
endif
-ifeq ($(strip $(ENABLE_XOM)),true)
+ifneq ($(strip $(ENABLE_XOM)),false)
ifndef LOCAL_IS_HOST_MODULE
my_xom := true
# Disable XOM in excluded paths.
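Note: the gate for execute-only memory flips here from opt-in to opt-out — target modules now get XOM unless ENABLE_XOM is explicitly set to false (host modules are still excluded). A minimal Python sketch of the old versus new predicate, purely for illustration; the function and parameter names are hypothetical and not part of the build system:

    def xom_enabled(enable_xom, is_host_module):
        """New behaviour: default on, opt out only with an explicit 'false'."""
        if str(enable_xom).strip() == "false":
            return False
        return not is_host_module  # host modules never get XOM

    def xom_enabled_old(enable_xom, is_host_module):
        """Old behaviour: required an explicit opt-in with 'true'."""
        return str(enable_xom).strip() == "true" and not is_host_module

    assert xom_enabled("", False) is True       # unset now means enabled
    assert xom_enabled_old("", False) is False  # unset previously meant disabled

The matching Soong-side change (EnableXOM in soong_config.mk below) switches to the same invert-of-false convention.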
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 3eaf55b..59b21bc 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -1,21 +1,24 @@
DEX_PREOPT_CONFIG := $(PRODUCT_OUT)/dexpreopt.config
-# list of boot classpath jars for dexpreopt
-DEXPREOPT_BOOT_JARS_MODULES := $(strip $(filter-out conscrypt,$(PRODUCT_BOOT_JARS)))
-PRODUCT_BOOTCLASSPATH_JARS := $(strip $(DEXPREOPT_BOOT_JARS_MODULES) $(filter conscrypt,$(PRODUCT_BOOT_JARS)))
-PRODUCT_BOOTCLASSPATH := $(subst $(space),:,$(foreach m,$(PRODUCT_BOOTCLASSPATH_JARS),/system/framework/$(m).jar))
+UPDATABLE_BOOT_MODULES := conscrypt
+UPDATABLE_BOOT_JARS := /apex/com.android.conscrypt/javalib/conscrypt.jar
+NON_UPDATABLE_BOOT_MODULES := $(filter-out $(UPDATABLE_BOOT_MODULES), $(PRODUCT_BOOT_JARS))
+NON_UPDATABLE_BOOT_JARS := $(foreach m,$(NON_UPDATABLE_BOOT_MODULES),/system/framework/$(m).jar)
+ALL_BOOT_JARS := $(NON_UPDATABLE_BOOT_JARS) $(UPDATABLE_BOOT_JARS)
+ALL_BOOT_MODULES := $(NON_UPDATABLE_BOOT_MODULES) $(UPDATABLE_BOOT_MODULES)
-PRODUCT_SYSTEM_SERVER_CLASSPATH := $(subst $(space),:,$(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),/system/framework/$(m).jar))
+PRODUCT_BOOTCLASSPATH := $(subst $(space),:,$(ALL_BOOT_JARS))
+DEXPREOPT_BOOT_JARS_MODULES := $(NON_UPDATABLE_BOOT_MODULES)
+DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS := $(NON_UPDATABLE_BOOT_JARS)
+DEXPREOPT_BOOTCLASSPATH_DEX_FILES := $(foreach jar,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS),$(PRODUCT_OUT)$(jar))
+
+# Create paths for boot image.
DEXPREOPT_BUILD_DIR := $(OUT_DIR)
DEXPREOPT_PRODUCT_DIR_FULL_PATH := $(PRODUCT_OUT)/dex_bootjars
DEXPREOPT_PRODUCT_DIR := $(patsubst $(DEXPREOPT_BUILD_DIR)/%,%,$(DEXPREOPT_PRODUCT_DIR_FULL_PATH))
DEXPREOPT_BOOT_JAR_DIR := system/framework
DEXPREOPT_BOOT_JAR_DIR_FULL_PATH := $(DEXPREOPT_PRODUCT_DIR_FULL_PATH)/$(DEXPREOPT_BOOT_JAR_DIR)
-
-DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS := $(foreach m,$(PRODUCT_BOOTCLASSPATH_JARS),/$(DEXPREOPT_BOOT_JAR_DIR)/$(m).jar)
-DEXPREOPT_BOOTCLASSPATH_DEX_FILES := $(foreach jar,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS),$(PRODUCT_OUT)$(jar))
-
DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/boot.art
DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(DEX2OAT_TARGET_ARCH)/boot.art
@@ -24,6 +27,8 @@
$(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH)/boot.art
endif
+PRODUCT_SYSTEM_SERVER_CLASSPATH := $(subst $(space),:,$(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),/system/framework/$(m).jar))
+
# The default value for LOCAL_DEX_PREOPT
DEX_PREOPT_DEFAULT ?= true
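Note: the boot classpath is now assembled from two lists — non-updatable jars that stay under /system/framework and updatable jars that live in an APEX (only conscrypt here) — and dexpreopt only ever sees the non-updatable part. A sketch of the resulting path construction, assuming a typical PRODUCT_BOOT_JARS value (the jar list is illustrative):

    product_boot_jars = ["core-oj", "core-libart", "conscrypt", "okhttp",
                         "bouncycastle", "apache-xml", "framework", "ext"]  # illustrative
    updatable_modules = ["conscrypt"]
    updatable_jars = ["/apex/com.android.conscrypt/javalib/conscrypt.jar"]

    non_updatable_modules = [m for m in product_boot_jars if m not in updatable_modules]
    non_updatable_jars = ["/system/framework/%s.jar" % m for m in non_updatable_modules]

    # PRODUCT_BOOTCLASSPATH keeps every jar; dexpreopt only gets the non-updatable ones.
    bootclasspath = ":".join(non_updatable_jars + updatable_jars)
    dexpreopt_locations = non_updatable_jars
    print(bootclasspath)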
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 631db0a..9d320b9 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -7,8 +7,8 @@
# The full system boot classpath
LIBART_TARGET_BOOT_JARS := $(DEXPREOPT_BOOT_JARS_MODULES)
-LIBART_TARGET_BOOT_DEX_LOCATIONS := $(foreach jar,$(LIBART_TARGET_BOOT_JARS),/$(DEXPREOPT_BOOT_JAR_DIR)/$(jar).jar)
-LIBART_TARGET_BOOT_DEX_FILES := $(foreach jar,$(LIBART_TARGET_BOOT_JARS),$(call intermediates-dir-for,JAVA_LIBRARIES,$(jar),,COMMON)/javalib.jar)
+LIBART_TARGET_BOOT_DEX_LOCATIONS := $(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS)
+LIBART_TARGET_BOOT_DEX_FILES := $(foreach mod,$(NON_UPDATABLE_BOOT_MODULES),$(call intermediates-dir-for,JAVA_LIBRARIES,$(mod),,COMMON)/javalib.jar)
# dex preopt on the bootclasspath produces multiple files. The first dex file
# is converted into boot.art (to match the legacy assumption that boot.art
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 69790cb..58c458a 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -122,8 +122,7 @@
ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
my_module_multilib := $(LOCAL_MULTILIB)
# If the module is not an SDK library and it's a system server jar, only preopt the primary arch.
- my_filtered_lib_name := $(patsubst %.impl,%,$(LOCAL_MODULE))
- ifeq (,$(filter $(JAVA_SDK_LIBRARIES),$(my_filtered_lib_name)))
+ ifeq (,$(filter $(JAVA_SDK_LIBRARIES),$(LOCAL_MODULE)))
# For a Java library, by default we build odex for both 1st arch and 2nd arch.
# But it can be overridden with "LOCAL_MULTILIB := first".
ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS),$(LOCAL_MODULE)))
@@ -192,7 +191,7 @@
$(call add_json_list, OptionalUsesLibraries, $(LOCAL_OPTIONAL_USES_LIBRARIES))
$(call add_json_list, UsesLibraries, $(LOCAL_USES_LIBRARIES))
$(call add_json_map, LibraryPaths)
- $(foreach lib,$(sort $(LOCAL_USES_LIBRARIES) $(LOCAL_OPTIONAL_USES_LIBRARIES) org.apache.http.legacy.impl android.hidl.base-V1.0-java android.hidl.manager-V1.0-java),\
+ $(foreach lib,$(sort $(LOCAL_USES_LIBRARIES) $(LOCAL_OPTIONAL_USES_LIBRARIES) org.apache.http.legacy android.hidl.base-V1.0-java android.hidl.manager-V1.0-java),\
$(call add_json_str, $(lib), $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar))
$(call end_json_map)
$(call add_json_list, Archs, $(my_dexpreopt_archs))
@@ -234,7 +233,7 @@
my_dexpreopt_deps := $(my_dex_jar)
my_dexpreopt_deps += $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE))
my_dexpreopt_deps += \
- $(foreach lib,$(sort $(LOCAL_USES_LIBRARIES) $(LOCAL_OPTIONAL_USES_LIBRARIES) org.apache.http.legacy.impl android.hidl.base-V1.0-java android.hidl.manager-V1.0-java),\
+ $(foreach lib,$(sort $(LOCAL_USES_LIBRARIES) $(LOCAL_OPTIONAL_USES_LIBRARIES) org.apache.http.legacy android.hidl.base-V1.0-java android.hidl.manager-V1.0-java),\
$(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
my_dexpreopt_deps += $(LOCAL_DEX_PREOPT_IMAGE_LOCATION)
# TODO: default boot images
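Note: with the .impl suffix dropped, org.apache.http.legacy joins the libraries that are always added to the LibraryPaths map in the module's dexpreopt config. A sketch of the JSON fragment this produces for a module with no extra LOCAL_USES_LIBRARIES; the intermediate paths shown are illustrative, not the exact out-directory layout:

    import json

    # Libraries this makefile always adds to the map (assumed default set).
    default_libs = ["android.hidl.base-V1.0-java", "android.hidl.manager-V1.0-java",
                    "org.apache.http.legacy"]

    library_paths = {
        lib: "out/target/common/obj/JAVA_LIBRARIES/%s_intermediates/javalib.jar" % lib
        for lib in sorted(default_libs)
    }
    print(json.dumps({"LibraryPaths": library_paths}, indent=2))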
diff --git a/core/envsetup.mk b/core/envsetup.mk
index f5babb6..8150fb6 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -206,7 +206,7 @@
#################################################################
# Set up minimal BOOTCLASSPATH list of jars to build/execute
# java code with dalvikvm/art.
-TARGET_CORE_JARS := core-oj core-libart core-simple conscrypt okhttp bouncycastle apache-xml
+TARGET_CORE_JARS := core-oj core-libart conscrypt okhttp bouncycastle apache-xml
ifeq ($(EMMA_INSTRUMENT),true)
ifneq ($(EMMA_INSTRUMENT_STATIC),true)
# For instrumented build, if Jacoco is not being included statically
diff --git a/core/java_common.mk b/core/java_common.mk
index ac26e5e..4e331d0 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -280,7 +280,7 @@
sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,system_current,$(lib_name)))
else
# When SDK libraries are referenced from modules built without the SDK, provide all the APIs to them
- sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(lib_name).impl)
+ sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(lib_name))
endif
else
ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
diff --git a/core/main.mk b/core/main.mk
index 282821c..c84cbe0 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1305,8 +1305,8 @@
# Build files and then package it into the rom formats
.PHONY: droidcore
-droidcore: files \
- systemimage \
+droidcore: $(filter $(HOST_OUT_ROOT)/%,$(modules_to_install)) \
+ $(INSTALLED_SYSTEMIMAGE_TARGET) \
$(INSTALLED_RAMDISK_TARGET) \
$(INSTALLED_BOOTIMAGE_TARGET) \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
@@ -1337,6 +1337,7 @@
$(INSTALLED_FILES_JSON_ROOT) \
$(INSTALLED_FILES_FILE_RECOVERY) \
$(INSTALLED_FILES_JSON_RECOVERY) \
+ $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
soong_docs
# dist_files only for putting your library into the dist directory with a full build.
diff --git a/core/package_internal.mk b/core/package_internal.mk
index c657f2e..31cb01e 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -88,7 +88,7 @@
endif
# If LOCAL_MODULE matches a rule in PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES,
-# override the manfest package name by the (first) rule matched
+# override the manifest package name by the (first) rule matched
override_manifest_name := $(strip $(word 1,\
$(foreach rule,$(PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES),\
$(eval _pkg_name_pat := $(call word-colon,1,$(rule)))\
@@ -100,7 +100,7 @@
))
ifneq (,$(override_manifest_name))
-# Note: this can override LOCAL_MANFEST_PACKAGE_NAME value set in Android.mk
+# Note: this can override LOCAL_MANIFEST_PACKAGE_NAME value set in Android.mk
LOCAL_MANIFEST_PACKAGE_NAME := $(override_manifest_name)
endif
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index b2c9e9e..383c11a 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -20,7 +20,6 @@
target/common/obj/JAVA_LIBRARIES/conscrypt_intermediates \
target/common/obj/JAVA_LIBRARIES/core-oj_intermediates \
target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
- target/common/obj/JAVA_LIBRARIES/core-simple_intermediates \
target/common/obj/JAVA_LIBRARIES/legacy-test_intermediates \
target/common/obj/JAVA_LIBRARIES/legacy-android-test_intermediates \
target/common/obj/JAVA_LIBRARIES/ext_intermediates \
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index f723633..91865bc 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -74,7 +74,28 @@
$(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(dir $(LOCAL_BUILT_MODULE))package.dex.apk))
endif
-$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
+# Run veridex on product, product_services and vendor modules.
+# We skip it for unbundled app builds where we cannot build veridex.
+module_run_appcompat :=
+ifeq (true,$(filter true, \
+ $(LOCAL_PRODUCT_MODULE) $(LOCAL_PRODUCT_SERVICES_MODULE) \
+ $(LOCAL_VENDOR_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) # ! unbundled app build
+ module_run_appcompat := true
+endif
+endif
+
+ifeq ($(module_run_appcompat),true)
+ $(LOCAL_BUILT_MODULE): $(appcompat-files)
+ $(LOCAL_BUILT_MODULE): PRIVATE_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+ $(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ @echo "Copy: $@"
+ $(copy-file-to-target)
+ $(call appcompat-header, aapt2)
+ $(run-appcompat)
+else
+ $(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
+endif
my_built_installed := $(foreach f,$(LOCAL_SOONG_BUILT_INSTALLED),\
$(call word-colon,1,$(f)):$(PRODUCT_OUT)$(call word-colon,2,$(f)))
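Note: the copy rule above now runs veridex (appcompat) on prebuilt APKs destined for the product, product_services or vendor partitions, but only on bundled platform builds where veridex itself can be built. A Python sketch that approximates the gating decision; the function and parameter names are hypothetical:

    def should_run_appcompat(is_product, is_product_services, is_vendor,
                             is_proprietary, target_build_apps, target_build_pdk):
        """True when the module installs to a partner-facing partition and this
        is not an unbundled app or PDK build (where veridex cannot be built)."""
        partition_module = any([is_product, is_product_services, is_vendor, is_proprietary])
        unbundled = bool(target_build_apps) or target_build_pdk == "true"
        return partition_module and not unbundled

    assert should_run_appcompat(True, False, False, False, "", "") is True
    assert should_run_appcompat(True, False, False, False, "Gallery2", "") is False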
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 77329c3..2363f2b 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -83,7 +83,7 @@
$(call add_json_bool, EnableCFI, $(call invert_bool,$(filter false,$(ENABLE_CFI))))
$(call add_json_list, CFIExcludePaths, $(CFI_EXCLUDE_PATHS) $(PRODUCT_CFI_EXCLUDE_PATHS))
$(call add_json_list, CFIIncludePaths, $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
-$(call add_json_bool, EnableXOM, $(filter true,$(ENABLE_XOM)))
+$(call add_json_bool, EnableXOM, $(call invert_bool,$(filter false,$(ENABLE_XOM))))
$(call add_json_list, XOMExcludePaths, $(XOM_EXCLUDE_PATHS) $(PRODUCT_XOM_EXCLUDE_PATHS))
$(call add_json_list, IntegerOverflowExcludePaths, $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index cfda44e..7ccbd68 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -72,17 +72,23 @@
endif
endif # TURBINE_ENABLED != false
+
ifdef LOCAL_SOONG_DEX_JAR
+ # Hidden API for boot jars
+ ifndef LOCAL_IS_HOST_MODULE
+ ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),) # is_boot_jar
+ # Derive greylist from classes.jar.
+ # We use full_classes_jar here, which is the post-proguard jar (on the basis that we also
+ # have a full_classes_pre_proguard_jar). This is consistent with the equivalent code in
+ # java.mk.
+ $(eval $(call hiddenapi-generate-csv,$(full_classes_jar),$(hiddenapi_flags_csv),$(hiddenapi_metadata_csv)))
+ $(eval $(call hiddenapi-copy-soong-jar,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+ endif
+ endif
+
ifneq ($(LOCAL_UNINSTALLABLE_MODULE),true)
ifndef LOCAL_IS_HOST_MODULE
ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),) # is_boot_jar
- # Derive greylist from classes.jar.
- # We use full_classes_jar here, which is the post-proguard jar (on the basis that we also
- # have a full_classes_pre_proguard_jar). This is consistent with the equivalent code in
- # java.mk.
- $(eval $(call hiddenapi-generate-csv,$(full_classes_jar),$(hiddenapi_flags_csv),$(hiddenapi_metadata_csv)))
- $(eval $(call hiddenapi-copy-soong-jar,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
-
ifeq (true,$(WITH_DEXPREOPT))
# For libart, the boot jars' odex files are replaced by $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE).
# We use this installed_odex trick to get boot.art installed.
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 9eb3ab3..2c56162 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -25,7 +25,7 @@
# If ONE_SHOT_MAKEFILE is set, our view of the world is smaller, so don't
# rewrite the file in that case.
ifndef ONE_SHOT_MAKEFILE
-files: $(MODULE_INFO_JSON)
+droidcore: $(MODULE_INFO_JSON)
endif
$(call dist-for-goals, general-tests, $(MODULE_INFO_JSON))
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index f9e9ee1..05ce35f 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -25,8 +25,8 @@
# Enable dynamic system image size and reserved 64MB in it.
BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE := 67108864
-# GSI always requires separate vendor packages to vendor.img
-TARGET_COPY_OUT_VENDOR := vendor
+# GSI forces product packages to /system for now.
+TARGET_COPY_OUT_PRODUCT := system/product
# Creates metadata partition mount point under root for
# the devices with a metadata partition
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index ec3c74f..46e5d93 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -8,6 +8,10 @@
TARGET_USERIMAGES_USE_EXT4 := true
+# Mainline devices must have /vendor and /product partitions.
+TARGET_COPY_OUT_VENDOR := vendor
+TARGET_COPY_OUT_PRODUCT := product
+
# system-as-root is mandatory from Android P
TARGET_NO_RECOVERY := true
BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
diff --git a/target/product/base.mk b/target/product/base.mk
index 1ecbf4a..804a2ee 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -18,3 +18,4 @@
# up by partition.
$(call inherit-product, $(SRC_TARGET_DIR)/product/base_system.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/base_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_product.mk)
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
new file mode 100644
index 0000000..6531304
--- /dev/null
+++ b/target/product/base_product.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Base modules and settings for the product partition.
+PRODUCT_PACKAGES += \
+ healthd \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index df793bf..069828c 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -79,6 +79,7 @@
fsck_msdos \
fs_config_files_system \
fs_config_dirs_system \
+ gsid \
heapprofd \
heapprofd_client \
gatekeeperd \
@@ -292,7 +293,7 @@
PRODUCT_PACKAGES += framework-oahl-backward-compatibility
PRODUCT_BOOT_JARS += framework-oahl-backward-compatibility
else
-PRODUCT_BOOT_JARS += org.apache.http.legacy.impl
+PRODUCT_BOOT_JARS += org.apache.http.legacy
endif
PRODUCT_COPY_FILES += \
@@ -307,7 +308,7 @@
PRODUCT_PACKAGES += framework-atb-backward-compatibility
PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
else
-PRODUCT_BOOT_JARS += android.test.base.impl
+PRODUCT_BOOT_JARS += android.test.base
endif
PRODUCT_COPY_FILES += system/core/rootdir/init.zygote32.rc:root/init.zygote32.rc
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 1bb4bee..9bb45d1 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -53,10 +53,6 @@
vndservice \
vndservicemanager \
-# Base modules and settings for the product partition.
-PRODUCT_PACKAGES += \
- healthd \
-
# VINTF data for vendor image
PRODUCT_PACKAGES += \
device_manifest.xml \
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index b432a91..9718dc6 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -18,11 +18,12 @@
# means most android products, but excludes wearables.
#
# Note: Do not add any contents directly to this file. Choose either
-# media_system or media_vendor depending on partition (also consider
-# base_<x>.mk or handheld_<x>.mk.
+# media_<x> depending on partition (also consider base_<x>.mk or
+# handheld_<x>.mk).
$(call inherit-product, $(SRC_TARGET_DIR)/product/media_system.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/media_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_product.mk)
PRODUCT_BRAND := generic
PRODUCT_DEVICE := generic
diff --git a/target/product/full_base_telephony.mk b/target/product/full_base_telephony.mk
index ee59090..af4097d 100644
--- a/target/product/full_base_telephony.mk
+++ b/target/product/full_base_telephony.mk
@@ -28,5 +28,4 @@
frameworks/native/data/etc/handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
diff --git a/target/product/generic.mk b/target/product/generic.mk
index 7a9732d..cc856f4 100644
--- a/target/product/generic.mk
+++ b/target/product/generic.mk
@@ -18,8 +18,7 @@
# It includes the base Android platform.
$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_no_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
# Overrides
PRODUCT_BRAND := generic
diff --git a/target/product/generic_no_telephony.mk b/target/product/generic_no_telephony.mk
index 5346476..324d36f 100644
--- a/target/product/generic_no_telephony.mk
+++ b/target/product/generic_no_telephony.mk
@@ -22,6 +22,7 @@
$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_product.mk)
PRODUCT_BRAND := generic
PRODUCT_DEVICE := generic
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
new file mode 100644
index 0000000..063b951
--- /dev/null
+++ b/target/product/handheld_product.mk
@@ -0,0 +1,43 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile contains the product partition contents for
+# a generic phone or tablet device. Only add something here if
+# it definitely doesn't belong on other types of devices (if it
+# does, use base_vendor.mk).
+$(call inherit-product, $(SRC_TARGET_DIR)/product/media_product.mk)
+
+# /product packages
+PRODUCT_PACKAGES += \
+ Browser2 \
+ Calendar \
+ Camera2 \
+ Contacts \
+ DeskClock \
+ Email \
+ Gallery2 \
+ LatinIME \
+ Launcher3QuickStep \
+ Music \
+ OneTimeInitializer \
+ PrintRecommendationService \
+ Provision \
+ QuickSearchBox \
+ Settings \
+ SettingsIntelligence \
+ StorageManager \
+ SystemUI \
+ WallpaperCropper \
diff --git a/target/product/handheld_vendor.mk b/target/product/handheld_vendor.mk
index b9970e9..ca7760a 100644
--- a/target/product/handheld_vendor.mk
+++ b/target/product/handheld_vendor.mk
@@ -30,25 +30,3 @@
power.default \
SysuiDarkThemeOverlay \
vibrator.default \
-
-# /product packages
-PRODUCT_PACKAGES += \
- Browser2 \
- Calendar \
- Camera2 \
- Contacts \
- DeskClock \
- Email \
- Gallery2 \
- LatinIME \
- Launcher3QuickStep \
- Music \
- OneTimeInitializer \
- PrintRecommendationService \
- Provision \
- QuickSearchBox \
- Settings \
- SettingsIntelligence \
- StorageManager \
- SystemUI \
- WallpaperCropper \
diff --git a/target/product/mainline.mk b/target/product/mainline.mk
index 44dcd60..59bad98 100644
--- a/target/product/mainline.mk
+++ b/target/product/mainline.mk
@@ -18,4 +18,6 @@
# for the other partitions.
$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_product.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_product.mk)
diff --git a/target/product/mainline_arm64.mk b/target/product/mainline_arm64.mk
index 6122ac1..c876636 100644
--- a/target/product/mainline_arm64.mk
+++ b/target/product/mainline_arm64.mk
@@ -37,6 +37,8 @@
system/app/Calendar/Calendar.apk \
system/app/Camera2/Camera2.apk \
system/app/DeskClock/DeskClock.apk \
+ system/app/DeskClock/oat/arm64/DeskClock.odex \
+ system/app/DeskClock/oat/arm64/DeskClock.vdex \
system/app/Email/Email.apk \
system/app/Gallery2/Gallery2.apk \
system/app/LatinIME/LatinIME.apk \
@@ -45,6 +47,7 @@
system/app/Music/Music.apk \
system/app/PrintRecommendationService/PrintRecommendationService.apk \
system/app/QuickSearchBox/QuickSearchBox.apk \
+ system/app/webview/webview.apk \
system/bin/healthd \
system/etc/init/healthd.rc \
system/etc/vintf/manifest/manifest_healthd.xml \
@@ -55,6 +58,8 @@
system/lib64/libjni_latinime.so \
system/lib64/libjni_tinyplanet.so \
system/priv-app/CarrierConfig/CarrierConfig.apk \
+ system/priv-app/CarrierConfig/oat/arm64/CarrierConfig.odex \
+ system/priv-app/CarrierConfig/oat/arm64/CarrierConfig.vdex \
system/priv-app/Contacts/Contacts.apk \
system/priv-app/Dialer/Dialer.apk \
system/priv-app/Launcher3QuickStep/Launcher3QuickStep.apk \
diff --git a/target/product/media_product.mk b/target/product/media_product.mk
new file mode 100644
index 0000000..17c24ee
--- /dev/null
+++ b/target/product/media_product.mk
@@ -0,0 +1,25 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This makefile contains the product partition contents for
+# media-capable devices (non-wearables). Only add something here
+# if it definitely doesn't belong on wearables. Otherwise, choose
+# base_vendor.mk.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base_product.mk)
+
+# /product packages
+PRODUCT_PACKAGES += \
+ webview \
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 992e2ed..65ee073 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -59,7 +59,6 @@
requestsync \
StatementService \
vndk_snapshot_package \
- webview \
PRODUCT_COPY_FILES += \
@@ -75,7 +74,7 @@
services \
ethernet-service \
wifi-service \
- com.android.location.provider.impl \
+ com.android.location.provider \
PRODUCT_COPY_FILES += \
system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
diff --git a/target/product/media_vendor.mk b/target/product/media_vendor.mk
index f30e6f3..7d4af64 100644
--- a/target/product/media_vendor.mk
+++ b/target/product/media_vendor.mk
@@ -24,7 +24,3 @@
PRODUCT_PACKAGES += \
libaudiopreprocessing \
libwebrtc_audio_preprocessing \
-
-# /product packages
-PRODUCT_PACKAGES += \
- webview \
diff --git a/target/product/telephony.mk b/target/product/telephony.mk
new file mode 100644
index 0000000..e0eb159
--- /dev/null
+++ b/target/product/telephony.mk
@@ -0,0 +1,20 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# All modules for telephony
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_product.mk)
diff --git a/target/product/telephony_product.mk b/target/product/telephony_product.mk
new file mode 100644
index 0000000..70d4828
--- /dev/null
+++ b/target/product/telephony_product.mk
@@ -0,0 +1,23 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is the list of modules that are specific to products that have telephony
+# hardware, and install to the product partition.
+
+# /product packages
+PRODUCT_PACKAGES += \
+ CarrierConfig \
+ Dialer \
diff --git a/target/product/telephony_vendor.mk b/target/product/telephony_vendor.mk
index 4cff16d..86dbcc9 100644
--- a/target/product/telephony_vendor.mk
+++ b/target/product/telephony_vendor.mk
@@ -21,9 +21,4 @@
PRODUCT_PACKAGES := \
rild \
-# /product packages
-PRODUCT_PACKAGES += \
- CarrierConfig \
- Dialer \
-
PRODUCT_COPY_FILES := \
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
index 7642876..7e4a98e 100644
--- a/target/product/treble_common.mk
+++ b/target/product/treble_common.mk
@@ -21,8 +21,7 @@
# Generic system image inherits from AOSP with telephony
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
# Enable dynamic partition size
PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 80d4023..456f791 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -483,7 +483,8 @@
# choose the transfers for conversion. The number '1024' can be further
# tweaked here to balance the package size and build time.
if max_stashed_blocks > stash_limit + 1024:
- self.SelectAndConvertDiffTransfersToNew()
+ self.SelectAndConvertDiffTransfersToNew(
+ max_stashed_blocks - stash_limit)
# Regenerate the sequence as the graph has changed.
self.FindSequenceForTransfers()
@@ -830,7 +831,8 @@
num_of_bytes = new_blocks * self.tgt.blocksize
logger.info(
" Total %d blocks (%d bytes) are packed as new blocks due to "
- "insufficient cache size.", new_blocks, num_of_bytes)
+ "insufficient cache size. Maximum blocks stashed simultaneously: %d",
+ new_blocks, num_of_bytes, max_stashed_blocks)
return new_blocks, max_stashed_blocks
def ComputePatches(self, prefix):
@@ -1339,7 +1341,7 @@
return patches
- def SelectAndConvertDiffTransfersToNew(self):
+ def SelectAndConvertDiffTransfersToNew(self, violated_stash_blocks):
"""Converts the diff transfers to reduce the max simultaneous stash.
Since the 'new' data is compressed with deflate, we can select the 'diff'
@@ -1347,6 +1349,8 @@
compressed data. Ideally, we want to convert the transfers with a small
size increase, but using a large number of stashed blocks.
"""
+ TransferSizeScore = namedtuple("TransferSizeScore",
+ "xf, used_stash_blocks, score")
logger.info("Selecting diff commands to convert to new.")
diff_queue = []
@@ -1360,7 +1364,7 @@
# for the remaining.
result = self.ComputePatchesForInputList(diff_queue, True)
- removed_stashed_blocks = 0
+ conversion_candidates = []
for xf_index, patch_info, compressed_size in result:
xf = self.transfers[xf_index]
if not xf.patch_info:
@@ -1368,21 +1372,43 @@
size_ratio = len(xf.patch_info.content) * 100.0 / compressed_size
diff_style = "imgdiff" if xf.patch_info.imgdiff else "bsdiff"
- logger.info("%s, target size: %d, style: %s, patch size: %d,"
+ logger.info("%s, target size: %d blocks, style: %s, patch size: %d,"
" compression_size: %d, ratio %.2f%%", xf.tgt_name,
xf.tgt_ranges.size(), diff_style,
len(xf.patch_info.content), compressed_size, size_ratio)
+ used_stash_blocks = sum(sr.size() for _, sr in xf.use_stash)
# Convert the transfer to new if the compressed size is smaller or equal.
# We don't need to maintain the stash_before lists here because the
# graph will be regenerated later.
if len(xf.patch_info.content) >= compressed_size:
- removed_stashed_blocks += sum(sr.size() for _, sr in xf.use_stash)
- logger.info("Converting %s to new", xf.tgt_name)
- xf.ConvertToNew()
+ # Add the transfer to the candidate list with a negative score; it will
+ # always be converted later.
+ conversion_candidates.append(TransferSizeScore(xf, used_stash_blocks,
+ -1))
+ elif used_stash_blocks > 0:
+ # This heuristic captures the size increase in the final package per unit
+ # of stashed data removed.
+ score = ((compressed_size - len(xf.patch_info.content)) * 100.0
+ / used_stash_blocks)
+ conversion_candidates.append(TransferSizeScore(xf, used_stash_blocks,
+ score))
+ # Transfers with lower score (i.e. less expensive to convert) will be
+ # converted first.
+ conversion_candidates.sort(key=lambda x: x.score)
- # TODO(xunchang) convert more transfers by sorting:
- # (compressed size - patch_size) / used_stashed_blocks
+ # TODO(xunchang), improve the logic to find the transfers to convert, e.g.
+ # convert the ones that contribute to the max stash, run ReviseStashSize
+ # multiple times etc.
+ removed_stashed_blocks = 0
+ for xf, used_stash_blocks, _ in conversion_candidates:
+ logger.info("Converting %s to new", xf.tgt_name)
+ xf.ConvertToNew()
+ removed_stashed_blocks += used_stash_blocks
+ # Experiments show that we will get a smaller package size if we remove
+ # slightly more stashed blocks than the violated stash blocks.
+ if removed_stashed_blocks >= violated_stash_blocks:
+ break
logger.info("Removed %d stashed blocks", removed_stashed_blocks)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 521b319..ad0432d 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -75,8 +75,13 @@
"""
cmd = ["find", path, "-print"]
output = common.RunAndCheckOutput(cmd, verbose=False)
- # TODO(b/122328872) Fix estimation algorithm to not need the multiplier.
- return output.count('\n') * 2
+ # Increase by over 4%, since the count of files and directories alone is not the whole picture.
+ inodes = output.count('\n')
+ spare_inodes = inodes * 4 // 100
+ min_spare_inodes = 8
+ if spare_inodes < min_spare_inodes:
+ spare_inodes = min_spare_inodes
+ return inodes + spare_inodes
def GetFilesystemCharacteristics(image_path, sparse_image=True):
@@ -436,8 +441,8 @@
size -= free_size
size += reserved_size
if reserved_size == 0:
- # add .2% margin
- size = size * 1002 // 1000
+ # add .3% margin
+ size = size * 1003 // 1000
# Use a minimum size, otherwise we will fail to calculate an AVB footer
# or fail to construct an ext4 image.
size = max(size, 256 * 1024)
@@ -448,8 +453,12 @@
extfs_inode_count = prop_dict["extfs_inode_count"]
inodes = int(fs_dict.get("Inode count", extfs_inode_count))
inodes -= int(fs_dict.get("Free inodes", "0"))
- # add .2% margin
- inodes = inodes * 1002 // 1000
+ # add .2% margin or 1 inode, whichever is greater
+ spare_inodes = inodes * 2 // 1000
+ min_spare_inodes = 1
+ if spare_inodes < min_spare_inodes:
+ spare_inodes = min_spare_inodes
+ inodes += spare_inodes
prop_dict["extfs_inode_count"] = str(inodes)
prop_dict["partition_size"] = str(size)
logger.info(
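Note: both estimators move from a fixed multiplier to explicit spare margins with a floor. A worked sketch of the new arithmetic in build_image.py (values are illustrative):

    def estimated_inodes(found_entries):
        """4% spare inodes on top of the find(1) count, never fewer than 8 spare."""
        spare = max(found_entries * 4 // 100, 8)
        return found_entries + spare

    def adjusted_inode_count(used_inodes):
        """0.2% margin on the used-inode count, but always at least one spare inode."""
        return used_inodes + max(used_inodes * 2 // 1000, 1)

    print(estimated_inodes(50))       # 58  -> small images get the 8-inode floor
    print(estimated_inodes(10000))    # 10400
    print(adjusted_inode_count(300))  # 301 -> the 1-inode floor kicks in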
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index dcc083c..be2c108 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -14,6 +14,7 @@
from __future__ import print_function
+import collections
import copy
import errno
import getopt
@@ -1523,6 +1524,13 @@
"""Called at the start of full OTA installation."""
return self._DoCall("FullOTA_InstallBegin")
+ def FullOTA_GetBlockDifferences(self):
+ """Called during full OTA installation and verification.
+ Implementation should return a list of BlockDifference objects describing
+ the update on each additional partitions.
+ """
+ return self._DoCall("FullOTA_GetBlockDifferences")
+
def FullOTA_InstallEnd(self):
"""Called at the end of full OTA installation; typically this is
used to install the image for the device's baseband processor."""
@@ -1551,6 +1559,13 @@
verification is complete)."""
return self._DoCall("IncrementalOTA_InstallBegin")
+ def IncrementalOTA_GetBlockDifferences(self):
+ """Called during incremental OTA installation and verification.
+ Implementation should return a list of BlockDifference objects describing
+ the update on each additional partitions.
+ """
+ return self._DoCall("IncrementalOTA_GetBlockDifferences")
+
def IncrementalOTA_InstallEnd(self):
"""Called at the end of incremental OTA installation; typically
this is used to install the image for the device's baseband
@@ -1745,11 +1760,29 @@
self.touched_src_ranges = b.touched_src_ranges
self.touched_src_sha1 = b.touched_src_sha1
- if src is None:
- _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
+ # On devices with dynamic partitions, for new partitions,
+ # src is None but OPTIONS.source_info_dict is not.
+ if OPTIONS.source_info_dict is None:
+ is_dynamic_build = OPTIONS.info_dict.get(
+ "use_dynamic_partitions") == "true"
else:
- _, self.device = GetTypeAndDevice("/" + partition,
- OPTIONS.source_info_dict)
+ is_dynamic_build = OPTIONS.source_info_dict.get(
+ "use_dynamic_partitions") == "true"
+
+ # For dynamic partition builds, always check the partition list in the target build
+ # because new partitions may be added.
+ is_dynamic = is_dynamic_build and partition in shlex.split(
+ OPTIONS.info_dict.get("dynamic_partition_list", "").strip())
+
+ if is_dynamic:
+ self.device = 'map_partition("%s")' % partition
+ else:
+ if OPTIONS.source_info_dict is None:
+ _, device_path = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
+ else:
+ _, device_path = GetTypeAndDevice("/" + partition,
+ OPTIONS.source_info_dict)
+ self.device = '"%s"' % device_path
@property
def required_cache(self):
@@ -1768,7 +1801,7 @@
self._WriteUpdate(script, output_zip)
if write_verify_script:
- self._WritePostInstallVerifyScript(script)
+ self.WritePostInstallVerifyScript(script)
def WriteStrictVerifyScript(self, script):
"""Verify all the blocks in the care_map, including clobbered blocks.
@@ -1782,11 +1815,11 @@
ranges = self.tgt.care_map
ranges_str = ranges.to_string_raw()
script.AppendExtra(
- 'range_sha1("%s", "%s") == "%s" && ui_print(" Verified.") || '
- 'ui_print("\\"%s\\" has unexpected contents.");' % (
+ 'range_sha1(%s, "%s") == "%s" && ui_print(" Verified.") || '
+ 'ui_print("%s has unexpected contents.");' % (
self.device, ranges_str,
self.tgt.TotalSha1(include_clobbered_blocks=True),
- self.device))
+ self.partition))
script.AppendExtra("")
def WriteVerifyScript(self, script, touched_blocks_only=False):
@@ -1811,7 +1844,7 @@
ranges_str = ranges.to_string_raw()
script.AppendExtra(
- 'if (range_sha1("%s", "%s") == "%s" || block_image_verify("%s", '
+ 'if (range_sha1(%s, "%s") == "%s" || block_image_verify(%s, '
'package_extract_file("%s.transfer.list"), "%s.new.dat", '
'"%s.patch.dat")) then' % (
self.device, ranges_str, expected_sha1,
@@ -1828,7 +1861,7 @@
# this check fails, give an explicit log message about the partition
# having been remounted R/W (the most likely explanation).
if self.check_first_block:
- script.AppendExtra('check_first_block("%s");' % (self.device,))
+ script.AppendExtra('check_first_block(%s);' % (self.device,))
# If version >= 4, try block recovery before abort update
if partition == "system":
@@ -1836,8 +1869,8 @@
else:
code = ErrorCode.VENDOR_RECOVER_FAILURE
script.AppendExtra((
- 'ifelse (block_image_recover("{device}", "{ranges}") && '
- 'block_image_verify("{device}", '
+ 'ifelse (block_image_recover({device}, "{ranges}") && '
+ 'block_image_verify({device}, '
'package_extract_file("{partition}.transfer.list"), '
'"{partition}.new.dat", "{partition}.patch.dat"), '
'ui_print("{partition} recovered successfully."), '
@@ -1859,14 +1892,14 @@
'abort("E%d: %s partition has unexpected contents");\n'
'endif;') % (code, partition))
- def _WritePostInstallVerifyScript(self, script):
+ def WritePostInstallVerifyScript(self, script):
partition = self.partition
script.Print('Verifying the updated %s image...' % (partition,))
# Unlike pre-install verification, clobbered_blocks should not be ignored.
ranges = self.tgt.care_map
ranges_str = ranges.to_string_raw()
script.AppendExtra(
- 'if range_sha1("%s", "%s") == "%s" then' % (
+ 'if range_sha1(%s, "%s") == "%s" then' % (
self.device, ranges_str,
self.tgt.TotalSha1(include_clobbered_blocks=True)))
@@ -1875,7 +1908,7 @@
if self.tgt.extended:
ranges_str = self.tgt.extended.to_string_raw()
script.AppendExtra(
- 'if range_sha1("%s", "%s") == "%s" then' % (
+ 'if range_sha1(%s, "%s") == "%s" then' % (
self.device, ranges_str,
self._HashZeroBlocks(self.tgt.extended.size())))
script.Print('Verified the updated %s image.' % (partition,))
@@ -1941,7 +1974,7 @@
else:
code = ErrorCode.VENDOR_UPDATE_FAILURE
- call = ('block_image_update("{device}", '
+ call = ('block_image_update({device}, '
'package_extract_file("{partition}.transfer.list"), '
'"{new_data_name}", "{partition}.patch.dat") ||\n'
' abort("E{code}: Failed to update {partition} image.");'.format(
@@ -2134,3 +2167,209 @@
logger.info("putting script in %s", sh_location)
output_sink(sh_location, sh)
+
+
+class DynamicPartitionUpdate(object):
+ def __init__(self, src_group=None, tgt_group=None, progress=None,
+ block_difference=None):
+ self.src_group = src_group
+ self.tgt_group = tgt_group
+ self.progress = progress
+ self.block_difference = block_difference
+
+ @property
+ def src_size(self):
+ if not self.block_difference:
+ return 0
+ return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.src)
+
+ @property
+ def tgt_size(self):
+ if not self.block_difference:
+ return 0
+ return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.tgt)
+
+ @staticmethod
+ def _GetSparseImageSize(img):
+ if not img:
+ return 0
+ return img.blocksize * img.total_blocks
+
+
+class DynamicGroupUpdate(object):
+ def __init__(self, src_size=None, tgt_size=None):
+ # None: group does not exist. 0: no size limits.
+ self.src_size = src_size
+ self.tgt_size = tgt_size
+
+
+class DynamicPartitionsDifference(object):
+ def __init__(self, info_dict, block_diffs, progress_dict=None,
+ source_info_dict=None):
+ if progress_dict is None:
+ progress_dict = dict()
+
+ self._remove_all_before_apply = False
+ if source_info_dict is None:
+ self._remove_all_before_apply = True
+ source_info_dict = dict()
+
+ block_diff_dict = {e.partition:e for e in block_diffs}
+ assert len(block_diff_dict) == len(block_diffs), \
+ "Duplicated BlockDifference object for {}".format(
+ [partition for partition, count in
+ collections.Counter(e.partition for e in block_diffs).items()
+ if count > 1])
+
+ dynamic_partitions = set(shlex.split(info_dict.get(
+ "dynamic_partition_list", "").strip()))
+ assert set(block_diff_dict.keys()) == dynamic_partitions, \
+ "Dynamic partitions: {}, BlockDifference objects: {}".format(
+ list(dynamic_partitions), list(block_diff_dict.keys()))
+
+ self._partition_updates = dict()
+
+ for p, block_diff in block_diff_dict.items():
+ self._partition_updates[p] = DynamicPartitionUpdate()
+ self._partition_updates[p].block_difference = block_diff
+
+ for p, progress in progress_dict.items():
+ if p in self._partition_updates:
+ self._partition_updates[p].progress = progress
+
+ tgt_groups = shlex.split(info_dict.get(
+ "super_partition_groups", "").strip())
+ src_groups = shlex.split(source_info_dict.get(
+ "super_partition_groups", "").strip())
+
+ for g in tgt_groups:
+ for p in shlex.split(info_dict.get(
+ "super_%s_partition_list" % g, "").strip()):
+ assert p in self._partition_updates, \
+ "{} is in target super_{}_partition_list but no BlockDifference " \
+ "object is provided.".format(p, g)
+ self._partition_updates[p].tgt_group = g
+
+ for g in src_groups:
+ for p in shlex.split(source_info_dict.get(
+ "super_%s_partition_list" % g, "").strip()):
+ assert p in self._partition_updates, \
+ "{} is in source super_{}_partition_list but no BlockDifference " \
+ "object is provided.".format(p, g)
+ self._partition_updates[p].src_group = g
+
+ if self._partition_updates:
+ logger.info("Updating dynamic partitions %s",
+ self._partition_updates.keys())
+
+ self._group_updates = dict()
+
+ for g in tgt_groups:
+ self._group_updates[g] = DynamicGroupUpdate()
+ self._group_updates[g].tgt_size = int(info_dict.get(
+ "super_%s_group_size" % g, "0").strip())
+
+ for g in src_groups:
+ if g not in self._group_updates:
+ self._group_updates[g] = DynamicGroupUpdate()
+ self._group_updates[g].src_size = int(source_info_dict.get(
+ "super_%s_group_size" % g, "0").strip())
+
+ self._Compute()
+
+ def WriteScript(self, script, output_zip, write_verify_script=False):
+ script.Comment('--- Start patching dynamic partitions ---')
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ script.Comment('Patch partition %s' % p)
+ u.block_difference.WriteScript(script, output_zip, progress=u.progress,
+ write_verify_script=False)
+
+ op_list_path = MakeTempFile()
+ with open(op_list_path, 'w') as f:
+ for line in self._op_list:
+ f.write('{}\n'.format(line))
+
+ ZipWrite(output_zip, op_list_path, "dynamic_partitions_op_list")
+
+ script.Comment('Update dynamic partition metadata')
+ script.AppendExtra('assert(update_dynamic_partitions('
+ 'package_extract_file("dynamic_partitions_op_list")));')
+
+ if write_verify_script:
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ u.block_difference.WritePostInstallVerifyScript(script)
+ script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_size and u.src_size <= u.tgt_size:
+ script.Comment('Patch partition %s' % p)
+ u.block_difference.WriteScript(script, output_zip, progress=u.progress,
+ write_verify_script=write_verify_script)
+ if write_verify_script:
+ script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+
+ script.Comment('--- End patching dynamic partitions ---')
+
+ def _Compute(self):
+ self._op_list = list()
+
+ def append(line):
+ self._op_list.append(line)
+
+ def comment(line):
+ self._op_list.append("# %s" % line)
+
+ if self._remove_all_before_apply:
+ comment('Remove all existing dynamic partitions and groups before '
+ 'applying full OTA')
+ append('remove_all_groups')
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and not u.tgt_group:
+ append('remove %s' % p)
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
+ comment('Move partition %s from %s to default' % (p, u.src_group))
+ append('move %s default' % p)
+
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ comment('Shrink partition %s from %d to %d' %
+ (p, u.src_size, u.tgt_size))
+ append('resize %s %s' % (p, u.tgt_size))
+
+ for g, u in self._group_updates.items():
+ if u.src_size is not None and u.tgt_size is None:
+ append('remove_group %s' % g)
+ if (u.src_size is not None and u.tgt_size is not None and
+ u.src_size > u.tgt_size):
+ comment('Shrink group %s from %d to %d' % (g, u.src_size, u.tgt_size))
+ append('resize_group %s %d' % (g, u.tgt_size))
+
+ for g, u in self._group_updates.items():
+ if u.src_size is None and u.tgt_size is not None:
+ comment('Add group %s with maximum size %d' % (g, u.tgt_size))
+ append('add_group %s %d' % (g, u.tgt_size))
+ if (u.src_size is not None and u.tgt_size is not None and
+ u.src_size < u.tgt_size):
+ comment('Grow group %s from %d to %d' % (g, u.src_size, u.tgt_size))
+ append('resize_group %s %d' % (g, u.tgt_size))
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_group and not u.src_group:
+ comment('Add partition %s to group %s' % (p, u.tgt_group))
+ append('add %s %s' % (p, u.tgt_group))
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_size and u.src_size < u.tgt_size:
+ comment('Grow partition %s from %d to %d' % (p, u.src_size, u.tgt_size))
+ append('resize %s %d' % (p, u.tgt_size))
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
+ comment('Move partition %s from default to %s' %
+ (p, u.tgt_group))
+ append('move %s %s' % (p, u.tgt_group))
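Note: the ordering of the generated dynamic_partitions_op_list matters — frees (removes, moves to the default group, shrinks) come before grows and adds, so the super partition is never oversubscribed mid-update. An illustrative op list for a hypothetical update that drops product_services, shrinks vendor, and grows system; the partition names and sizes are made up:

    # Hypothetical op list, in the order _Compute() would emit it.
    example_op_list = [
        "remove product_services",                        # partition gone in target
        "# Shrink partition vendor from 536870912 to 402653184",
        "resize vendor 402653184",
        "# Grow group group_basic from 1610612736 to 1879048192",
        "resize_group group_basic 1879048192",
        "# Grow partition system from 805306368 to 1073741824",
        "resize system 1073741824",
    ]
    for line in example_op_list:
        print(line)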
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 7ec8ad8..3fbcbcf 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -826,32 +826,51 @@
# See the notes in WriteBlockIncrementalOTAPackage().
allow_shared_blocks = target_info.get('ext4_share_dup_blocks') == "true"
- # Full OTA is done as an "incremental" against an empty source image. This
- # has the effect of writing new data from the package to the entire
- # partition, but lets us reuse the updater code that writes incrementals to
- # do it.
- system_tgt = common.GetSparseImage("system", OPTIONS.input_tmp, input_zip,
- allow_shared_blocks)
- system_tgt.ResetFileMap()
- system_diff = common.BlockDifference("system", system_tgt, src=None)
- system_diff.WriteScript(script, output_zip,
- write_verify_script=OPTIONS.verify)
+ def GetBlockDifference(partition):
+ # Full OTA is done as an "incremental" against an empty source image. This
+ # has the effect of writing new data from the package to the entire
+ # partition, but lets us reuse the updater code that writes incrementals to
+ # do it.
+ tgt = common.GetSparseImage(partition, OPTIONS.input_tmp, input_zip,
+ allow_shared_blocks)
+ tgt.ResetFileMap()
+ diff = common.BlockDifference(partition, tgt, src=None)
+ return diff
- boot_img = common.GetBootableImage(
- "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+ device_specific_diffs = device_specific.FullOTA_GetBlockDifferences()
+ if device_specific_diffs:
+ assert all(isinstance(diff, common.BlockDifference)
+ for diff in device_specific_diffs), \
+ "FullOTA_GetBlockDifferences is not returning a list of " \
+ "BlockDifference objects"
+ progress_dict = dict()
+ block_diffs = [GetBlockDifference("system")]
if HasVendorPartition(input_zip):
- script.ShowProgress(0.1, 0)
+ block_diffs.append(GetBlockDifference("vendor"))
+ progress_dict["vendor"] = 0.1
+ if device_specific_diffs:
+ block_diffs += device_specific_diffs
- vendor_tgt = common.GetSparseImage("vendor", OPTIONS.input_tmp, input_zip,
- allow_shared_blocks)
- vendor_tgt.ResetFileMap()
- vendor_diff = common.BlockDifference("vendor", vendor_tgt)
- vendor_diff.WriteScript(script, output_zip,
- write_verify_script=OPTIONS.verify)
+ if target_info.get('use_dynamic_partitions') == "true":
+ # Use empty source_info_dict to indicate that all partitions / groups must
+ # be re-added.
+ dynamic_partitions_diff = common.DynamicPartitionsDifference(
+ info_dict=OPTIONS.info_dict,
+ block_diffs=block_diffs,
+ progress_dict=progress_dict)
+ dynamic_partitions_diff.WriteScript(script, output_zip,
+ write_verify_script=OPTIONS.verify)
+ else:
+ for block_diff in block_diffs:
+ block_diff.WriteScript(script, output_zip,
+ progress=progress_dict.get(block_diff.partition),
+ write_verify_script=OPTIONS.verify)
AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip, target_info)
+ boot_img = common.GetBootableImage(
+ "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
common.CheckSize(boot_img.data, "boot.img", target_info)
common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
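Note: FullOTA_GetBlockDifferences gives device-specific code a hook to contribute extra partitions to the full OTA. A sketch of what a device's releasetools extension might return, assuming a hypothetical "odm" image inside the target files; the partition name and helper usage are illustrative, not a documented recipe:

    # Hypothetical device-specific releasetools module.
    import common

    def FullOTA_GetBlockDifferences(info):
        """Return extra BlockDifference objects for partitions the core script
        does not handle itself (here a made-up 'odm' partition)."""
        tgt = common.GetSparseImage("odm", info.input_tmp, info.input_zip,
                                    allow_shared_blocks=False)
        tgt.ResetFileMap()  # full OTA: treat the whole image as new data
        return [common.BlockDifference("odm", tgt, src=None)]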
@@ -1571,18 +1590,43 @@
system_diff.WriteVerifyScript(script, touched_blocks_only=True)
if vendor_diff:
vendor_diff.WriteVerifyScript(script, touched_blocks_only=True)
+ device_specific_diffs = device_specific.IncrementalOTA_GetBlockDifferences()
+ if device_specific_diffs:
+ assert all(isinstance(diff, common.BlockDifference)
+ for diff in device_specific_diffs), \
+ "IncrementalOTA_GetBlockDifferences is not returning a list of " \
+ "BlockDifference objects"
+ for diff in device_specific_diffs:
+ diff.WriteVerifyScript(script, touched_blocks_only=True)
script.Comment("---- start making changes here ----")
device_specific.IncrementalOTA_InstallBegin()
- system_diff.WriteScript(script, output_zip,
- progress=0.8 if vendor_diff else 0.9,
- write_verify_script=OPTIONS.verify)
-
+ block_diffs = [system_diff]
+ progress_dict = {"system": 0.8 if vendor_diff else 0.9}
if vendor_diff:
- vendor_diff.WriteScript(script, output_zip, progress=0.1,
- write_verify_script=OPTIONS.verify)
+ block_diffs.append(vendor_diff)
+ progress_dict["vendor"] = 0.1
+ if device_specific_diffs:
+ block_diffs += device_specific_diffs
+
+ if OPTIONS.source_info_dict.get("use_dynamic_partitions") == "true":
+ if OPTIONS.target_info_dict.get("use_dynamic_partitions") != "true":
+ raise RuntimeError(
+ "can't generate incremental that disables dynamic partitions")
+ dynamic_partitions_diff = common.DynamicPartitionsDifference(
+ info_dict=OPTIONS.target_info_dict,
+ source_info_dict=OPTIONS.source_info_dict,
+ block_diffs=block_diffs,
+ progress_dict=progress_dict)
+ dynamic_partitions_diff.WriteScript(
+ script, output_zip, write_verify_script=OPTIONS.verify)
+ else:
+ for block_diff in block_diffs:
+ block_diff.WriteScript(script, output_zip,
+ progress=progress_dict.get(block_diff.partition),
+ write_verify_script=OPTIONS.verify)
if OPTIONS.two_step:
common.ZipWriteStr(output_zip, "boot.img", target_boot.data)