Merge "Remove sepolicy_split from Soong."
diff --git a/Changes.md b/Changes.md
index 53ff007..3e48bad 100644
--- a/Changes.md
+++ b/Changes.md
@@ -86,6 +86,11 @@
$(TARGET): myscript.py $(sort $(shell find my/python/lib -name '*.py'))
PYTHONPATH=my/python/lib:$$PYTHONPATH myscript.py -o $@
```
+### Stop using PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE directly {#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE}
+
+Specify Framework Compatibility Matrix Version in device manifest by adding a `target-level`
+attribute to the root element `<manifest>`. If `PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE`
+is 26 or 27, you can add `target-level="1"` to your device manifest instead.
### Other envsetup.sh variables {#other_envsetup_variables}
diff --git a/core/Makefile b/core/Makefile
index 20d14d2..85ae96e 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -841,7 +841,8 @@
tools_notice_file_txt := $(HOST_OUT_INTERMEDIATES)/NOTICE.txt
tools_notice_file_html := $(HOST_OUT_INTERMEDIATES)/NOTICE.html
-ifeq ($(PRODUCT_FULL_TREBLE),true)
+# TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior.
+ifeq ($(PRODUCT_NOTICE_SPLIT),true)
target_notice_file_html_or_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml
target_notice_file_html_or_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE.xml.gz
installed_notice_html_or_xml_gz := $(TARGET_OUT)/etc/NOTICE.xml.gz
@@ -1043,7 +1044,6 @@
$(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
-$(if $(BOARD_HAS_EXT4_RESERVED_BLOCKS),$(hide) echo "has_ext4_reserved_blocks=$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "system_squashfs_compressor_opt=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
@@ -1456,9 +1456,6 @@
du -sm $(TARGET_OUT) 1>&2;\
if [ "$(INTERNAL_USERIMAGES_EXT_VARIANT)" == "ext4" ]; then \
maxsize=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE); \
- if [ "$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" == "true" ]; then \
- maxsize=$$((maxsize - 4096 * 4096)); \
- fi; \
echo "The max is $$(( maxsize / 1048576 )) MB." 1>&2 ;\
else \
echo "The max is $$(( $(BOARD_SYSTEMIMAGE_PARTITION_SIZE) / 1048576 )) MB." 1>&2 ;\
@@ -2174,6 +2171,7 @@
$(HOST_OUT_EXECUTABLES)/simg2img \
$(HOST_OUT_EXECUTABLES)/e2fsck \
$(HOST_OUT_EXECUTABLES)/build_verity_tree \
+ $(HOST_OUT_EXECUTABLES)/generate_verity_key \
$(HOST_OUT_EXECUTABLES)/verity_signer \
$(HOST_OUT_EXECUTABLES)/verity_verifier \
$(HOST_OUT_EXECUTABLES)/append2simg \
@@ -2199,6 +2197,7 @@
$(HOST_LIBRARY_PATH)/liblog$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libcutils$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libselinux$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libcrypto_utils$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libcrypto-host$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libext2fs-host$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libext2_blkid-host$(HOST_SHLIB_SUFFIX) \
@@ -2480,9 +2479,6 @@
ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
$(hide) echo "recovery_size=$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
endif
-ifdef BOARD_HAS_EXT4_RESERVED_BLOCKS
- $(hide) echo "has_ext4_reserved_blocks=$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" >> $(zip_root)/META/misc_info.txt
-endif
ifdef TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS
@# TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS can be empty to indicate that nothing but defaults should be used.
$(hide) echo "recovery_mount_options=$(TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS)" >> $(zip_root)/META/misc_info.txt
diff --git a/core/binary.mk b/core/binary.mk
index c468079..6920373 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -88,7 +88,7 @@
endif
# Make sure we've built the NDK.
- my_additional_dependencies += $(SOONG_OUT_DIR)/ndk.timestamp
+ my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
# mips32r6 is not supported by the NDK. No released NDK contains these
# libraries, but the r10 in prebuilts/ndk had a local hack to add them :(
@@ -187,7 +187,7 @@
endif
LOCAL_NDK_STL_VARIANT := $(strip $(LOCAL_NDK_STL_VARIANT))
ifeq (,$(LOCAL_NDK_STL_VARIANT))
- LOCAL_NDK_STL_VARIANT := c++_shared
+ LOCAL_NDK_STL_VARIANT := system
endif
ifneq (1,$(words $(filter none system stlport_static stlport_shared c++_static c++_shared gnustl_static, $(LOCAL_NDK_STL_VARIANT))))
$(error $(LOCAL_PATH): Unknown LOCAL_NDK_STL_VARIANT $(LOCAL_NDK_STL_VARIANT))
@@ -207,51 +207,26 @@
endif
else # LOCAL_NDK_STL_VARIANT is not stlport_* either
ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
- # Pre-r11 NDKs used libgabi++ for libc++'s C++ ABI, but r11 and later use
- # libc++abi.
- #
- # r13 no longer has the inner directory as a side effect of just using
- # external/libcxx.
- ifeq (r10,$(LOCAL_NDK_VERSION))
- my_ndk_stl_include_path := \
- $(my_ndk_source_root)/cxx-stl/llvm-libc++/libcxx/include
- my_ndk_stl_include_path += \
- $(my_ndk_source_root)/cxx-stl/llvm-libc++/gabi++/include
- else ifeq (r11,$(LOCAL_NDK_VERSION))
- my_ndk_stl_include_path := \
- $(my_ndk_source_root)/cxx-stl/llvm-libc++/libcxx/include
- my_ndk_stl_include_path += \
- $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/libcxxabi/include
- else
- my_ndk_stl_include_path := \
- $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
- my_ndk_stl_include_path += \
- $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include
- endif
+ my_ndk_stl_include_path := \
+ $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
+ my_ndk_stl_include_path += \
+ $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include
my_ndk_stl_include_path += $(my_ndk_source_root)/android/support/include
my_libcxx_libdir := \
$(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant)
- ifneq (,$(filter r10 r11,$(LOCAL_NDK_VERSION)))
- ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
- my_ndk_stl_static_lib := $(my_libcxx_libdir)/libc++_static.a
- else
- my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
- endif
+ ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
+ my_ndk_stl_static_lib := \
+ $(my_libcxx_libdir)/libc++_static.a \
+ $(my_libcxx_libdir)/libc++abi.a
else
- ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
- my_ndk_stl_static_lib := \
- $(my_libcxx_libdir)/libc++_static.a \
- $(my_libcxx_libdir)/libc++abi.a
- else
- my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
- endif
+ my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
+ endif
- my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
- ifneq (,$(filter armeabi armeabi-v7a,$(my_cpu_variant)))
- my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
- endif
+ my_ndk_stl_static_lib += $(my_libcxx_libdir)/libandroid_support.a
+ ifneq (,$(filter armeabi armeabi-v7a,$(my_cpu_variant)))
+ my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
endif
my_ldlibs += -ldl
@@ -870,6 +845,9 @@
###########################################################
## Compile the .proto files to .cc (or .c) and then to .o
###########################################################
+ifeq ($(strip $(LOCAL_PROTOC_OPTIMIZE_TYPE)),)
+ LOCAL_PROTOC_OPTIMIZE_TYPE := lite
+endif
proto_sources := $(filter %.proto,$(my_src_files))
ifneq ($(proto_sources),)
proto_gen_dir := $(generated_sources_dir)/proto
@@ -891,7 +869,7 @@
endif
my_proto_c_includes := external/protobuf/src
my_cflags += -DGOOGLE_PROTOBUF_NO_RTTI
-my_protoc_flags := --cpp_out=$(proto_gen_dir)
+my_protoc_flags := --cpp_out=$(if $(filter lite lite-static,$(LOCAL_PROTOC_OPTIMIZE_TYPE)),lite:,)$(proto_gen_dir)
my_protoc_deps :=
endif
my_proto_c_includes += $(proto_gen_dir)
@@ -1736,16 +1714,15 @@
ifneq ($(LOCAL_TIDY_CHECKS),)
my_tidy_checks := $(my_tidy_checks),$(LOCAL_TIDY_CHECKS)
endif
- # Set up global default clang-tidy flags, which is none.
- my_tidy_flags := $(WITH_TIDY_FLAGS)
- # Use local clang-tidy flags if specified.
- ifneq ($(LOCAL_TIDY_FLAGS),)
- my_tidy_flags := $(LOCAL_TIDY_FLAGS)
- endif
+ my_tidy_flags += $(WITH_TIDY_FLAGS) $(LOCAL_TIDY_FLAGS)
# If tidy flags are not specified, default to check all header files.
ifeq ($(my_tidy_flags),)
my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
endif
+ # If clang-tidy is not enabled globally, add the -quiet flag.
+ ifeq (,$(filter 1 true,$(WITH_TIDY)))
+ my_tidy_flags += -quiet
+ endif
# We might be using the static analyzer through clang-tidy.
# https://bugs.llvm.org/show_bug.cgi?id=32914
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index fc2adde..09f9be5 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -227,8 +227,9 @@
LOCAL_SDK_VERSION:=
LOCAL_SHARED_ANDROID_LIBRARIES:=
LOCAL_SHARED_LIBRARIES:=
-LOCAL_SOONG_HEADER_JAR :=
+LOCAL_SOONG_CLASSES_JAR :=
LOCAL_SOONG_DEX_JAR :=
+LOCAL_SOONG_HEADER_JAR :=
LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR :=
LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
LOCAL_SOONG_RRO_DIRS :=
diff --git a/core/config.mk b/core/config.mk
index e43793a..3b1c157 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -75,6 +75,7 @@
ANDROID_EMULATOR_PREBUILTS \
ANDROID_PRE_BUILD_PATHS \
,See $(CHANGES_URL)#other_envsetup_variables)
+$(KATI_obsolete_var PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE,Set FCM Version in device manifest instead. See $(CHANGES_URL)#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
CHANGES_URL :=
@@ -747,15 +748,25 @@
PRODUCT_FULL_TREBLE := true
endif
+# TODO(b/69865032): Make PRODUCT_NOTICE_SPLIT the default behavior and remove
+# references to it here and below.
+ifdef PRODUCT_NOTICE_SPLIT_OVERRIDE
+ $(error PRODUCT_NOTICE_SPLIT_OVERRIDE cannot be set.)
+endif
+
requirements := \
PRODUCT_TREBLE_LINKER_NAMESPACES \
PRODUCT_SEPOLICY_SPLIT \
PRODUCT_ENFORCE_VINTF_MANIFEST \
+ PRODUCT_NOTICE_SPLIT
# If it is overriden, then the requirement override is taken, otherwise it's
# PRODUCT_FULL_TREBLE
$(foreach req,$(requirements),$(eval \
$(req) := $(if $($(req)_OVERRIDE),$($(req)_OVERRIDE),$(PRODUCT_FULL_TREBLE))))
+# If the requirement is false for any reason, then it's not PRODUCT_FULL_TREBLE
+$(foreach req,$(requirements),$(eval \
+ PRODUCT_FULL_TREBLE := $(if $(filter false,$($(req))),false,$(PRODUCT_FULL_TREBLE))))
PRODUCT_FULL_TREBLE_OVERRIDE ?=
$(foreach req,$(requirements),$(eval $(req)_OVERRIDE ?=))
@@ -784,44 +795,7 @@
endif
FRAMEWORK_MANIFEST_FILE := system/libhidl/manifest.xml
-
-# Compatibility matrix versioning:
-# MATRIX_LEVEL_OVERRIDE defined: MATRIX_LEVEL = MATRIX_LEVEL_OVERRIDE
-# MATRIX_LEVEL_OVERRIDE undefined:
-# FULL_TREBLE != true: MATRIX_LEVEL = legacy
-# FULL_TREBLE == true:
-# SHIPPING_API_LEVEL defined: MATRIX_LEVEL = SHIPPING_API_LEVEL
-# SHIPPING_API_LEVEL undefined: MATRIX_LEVEL = PLATFORM_SDK_VERSION
-# MATRIX_LEVEL == legacy => legacy.xml
-# MATRIX_LEVEL <= 26 => 26.xml
-# MATRIX_LEVEL == 27 => 27.xml # define when 27 releases
-# MATRIX_LEVEL == 28 => 28.xml # define when 28 releases
-# ...
-# otherwise => current.xml
-
-ifneq ($(PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE),)
- PRODUCT_COMPATIBILITY_MATRIX_LEVEL := $(PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
-else ifneq ($(PRODUCT_FULL_TREBLE),true)
- PRODUCT_COMPATIBILITY_MATRIX_LEVEL := legacy
-else ifneq ($(PRODUCT_SHIPPING_API_LEVEL),)
- PRODUCT_COMPATIBILITY_MATRIX_LEVEL := $(PRODUCT_SHIPPING_API_LEVEL)
-else
- PRODUCT_COMPATIBILITY_MATRIX_LEVEL := $(PLATFORM_SDK_VERSION)
-endif
-
-ifeq ($(strip $(PRODUCT_COMPATIBILITY_MATRIX_LEVEL)),legacy)
- FRAMEWORK_COMPATIBILITY_MATRIX_FILE := hardware/interfaces/compatibility_matrix.legacy.xml
-else ifeq ($(call math_gt_or_eq,$(PRODUCT_COMPATIBILITY_MATRIX_LEVEL),26),)
- # All PRODUCT_FULL_TREBLE devices with shipping API levels < 26 get the level 26 manifest
- # as that is the first.
- FRAMEWORK_COMPATIBILITY_MATRIX_FILE := hardware/interfaces/compatibility_matrix.26.xml
-else ifeq ($(call math_gt_or_eq,$(PRODUCT_COMPATIBILITY_MATRIX_LEVEL),28),)
- # All shipping API levels with released compatibility matrices get the corresponding matrix.
- FRAMEWORK_COMPATIBILITY_MATRIX_FILE := \
- hardware/interfaces/compatibility_matrix.$(PRODUCT_COMPATIBILITY_MATRIX_LEVEL).xml
-else
- FRAMEWORK_COMPATIBILITY_MATRIX_FILE := hardware/interfaces/compatibility_matrix.current.xml
-endif
+FRAMEWORK_COMPATIBILITY_MATRIX_FILES := $(wildcard hardware/interfaces/compatibility_matrix.*.xml)
BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
BUILD_DATETIME_FROM_FILE := $$(cat $(OUT_DIR)/build_date.txt)
diff --git a/core/definitions.mk b/core/definitions.mk
index 405f7c6..5ad78fd 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2415,15 +2415,16 @@
define transform-classes-d8.jar-to-dex
@echo "target Dex: $(PRIVATE_MODULE)"
@mkdir -p $(dir $@)
-$(hide) rm -rf $(dir $@)classes*.dex $(dir $@)*.class $(dir $@)d8_input
-$(hide) unzip -qq -d $(dir $@)d8_input $< "*.class"
+$(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
+$(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
$(hide) $(DX_COMMAND) \
--output $(dir $@) \
--min-api $(PRIVATE_MIN_SDK_VERSION) \
+ $(subst --main-dex-list=, --main-dex-list , \
$(subst --no-locals, --release, \
- $(filter-out --core-library --multi-dex,$(PRIVATE_DX_FLAGS))) \
- $$(find $(dir $@)d8_input -name *.class | sort)
-$(hide) rm -fr $(dir $@)d8_input
+ $(filter-out --core-library --multi-dex --minimal-main-dex,$(PRIVATE_DX_FLAGS)))) \
+ $(dir $@)d8_input.jar
+$(hide) rm -f $(dir $@)d8_input.jar
endef
# Create a mostly-empty .jar file that we'll add to later.
diff --git a/core/java_common.mk b/core/java_common.mk
index a816324..aac5982 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -38,6 +38,9 @@
###########################################################
## .proto files: Compile proto files to .java
###########################################################
+ifeq ($(strip $(LOCAL_PROTOC_OPTIMIZE_TYPE)),)
+ LOCAL_PROTOC_OPTIMIZE_TYPE := lite
+endif
proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
# Because names of the .java files compiled from .proto files are unknown until the
# .proto files are compiled, we use a timestamp file as depedency.
@@ -67,7 +70,7 @@
endif
endif
$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(if $(filter lite,$(LOCAL_PROTOC_OPTIMIZE_TYPE)),lite$(if $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS),:,),)$(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
$(proto_java_sources_file_stamp) : $(proto_sources_fullpath) $(PROTOC)
$(call transform-proto-to-java)
diff --git a/core/main.mk b/core/main.mk
index 621d309..fe178da 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -126,6 +126,8 @@
$(error ADDITIONAL_BUILD_PROPERTIES must not be set before here: $(ADDITIONAL_BUILD_PROPERTIES))
endif
+ADDITIONAL_BUILD_PROPERTIES :=
+
#
# -----------------------------------------------------------------
# Add the product-defined properties to the build properties.
@@ -198,6 +200,11 @@
ADDITIONAL_BUILD_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
+$(KATI_obsolete_var PRODUCT_FULL_TREBLE,\
+ Code should be written to work regardless of a device being Treble or \
+ variables like PRODUCT_SEPOLICY_SPLIT should be used until that is \
+ possible.)
+
# -----------------------------------------------------------------
###
### In this section we set up the things that are different
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index e94c019..633ef0c 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -16,8 +16,26 @@
ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
$(intermediates.COMMON)/jacoco-report-classes.jar))
+ $(call add-dependency,$(common_javalib.jar),\
+ $(intermediates.COMMON)/jacoco-report-classes.jar)
endif
+full_classes_jar := $(intermediates.COMMON)/classes.jar
+full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
+full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
+
+$(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_jar)))
+$(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_pre_proguard_jar)))
+
+ifneq ($(TURBINE_DISABLED),false)
+ifdef LOCAL_SOONG_HEADER_JAR
+$(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
+else
+$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
+endif
+endif # TURBINE_DISABLED != false
+
+
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
@@ -45,8 +63,8 @@
PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
ifdef LOCAL_CERTIFICATE
- PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(LOCAL_CERTIFICATE)
- PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(patsubst %.x509.pem,%.pk8,$(LOCAL_CERTIFICATE))
+ PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE)
+ PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(patsubst %.x509.pem,%.pk8,$(LOCAL_CERTIFICATE))
endif
ifndef LOCAL_IS_HOST_MODULE
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 5136582..11ca473 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -117,7 +117,6 @@
$(call add_json_bool, Malloc_not_svelte, $(call invert_bool,$(filter true,$(MALLOC_SVELTE))))
$(call add_json_str, Override_rs_driver, $(OVERRIDE_RS_DRIVER))
-$(call add_json_bool, Treble, $(filter true,$(PRODUCT_FULL_TREBLE)))
$(call add_json_bool, Treble_linker_namespaces, $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
$(call add_json_bool, Enforce_vintf_manifest, $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk
index 3b5f5da..bcbdfcf 100644
--- a/core/tasks/tradefed-tests-list.mk
+++ b/core/tasks/tradefed-tests-list.mk
@@ -19,12 +19,12 @@
$(foreach dir, $(COMPATIBILITY.tradefed_tests_dir), \
$(eval tradefed_tests += $(shell find $(dir) -type f -name "*.xml")))
tradefed_tests_list_intermediates := $(call intermediates-dir-for,PACKAGING,tradefed_tests_list,HOST,COMMON)
-tradefed_tests_list_zip := $(tradefed_tests_list_intermediates)/tradefed-tests-list.zip
+tradefed_tests_list_zip := $(tradefed_tests_list_intermediates)/tradefed-tests_list.zip
all_tests :=
$(foreach test, $(tradefed_tests), \
$(eval all_tests += $(word 2,$(subst /res/config/,$(space),$(test)))))
$(tradefed_tests_list_zip) : PRIVATE_tradefed_tests := $(subst .xml,,$(subst $(space),\n,$(sort $(all_tests))))
-$(tradefed_tests_list_zip) : PRIVATE_tradefed_tests_list := $(tradefed_tests_list_intermediates)/tradefed-tests-list
+$(tradefed_tests_list_zip) : PRIVATE_tradefed_tests_list := $(tradefed_tests_list_intermediates)/tradefed-tests_list
$(tradefed_tests_list_zip) : $(tradefed_tests) $(SOONG_ZIP)
@echo "Package: $@"
@@ -34,3 +34,5 @@
tradefed-tests-list : $(tradefed_tests_list_zip)
$(call dist-for-goals, tradefed-tests-list, $(tradefed_tests_list_zip))
+
+tests: tradefed-tests-list
diff --git a/target/board/Android.mk b/target/board/Android.mk
index f8d3bb3..fc32cd9 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -34,7 +34,8 @@
ifdef DEVICE_MANIFEST_FILE
# $(DEVICE_MANIFEST_FILE) can be a list of files
include $(CLEAR_VARS)
-LOCAL_MODULE := manifest.xml
+LOCAL_MODULE := device_manifest.xml
+LOCAL_MODULE_STEM := manifest.xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)
@@ -42,6 +43,8 @@
$(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE)
$(GEN): $(DEVICE_MANIFEST_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
+ PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
+ PRODUCT_SHIPPING_API_LEVEL=$(PRODUCT_SHIPPING_API_LEVEL) \
$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \
-i $(call normalize-path-list,$(PRIVATE_DEVICE_MANIFEST_FILE))
@@ -53,7 +56,8 @@
# Device Compatibility Matrix
ifdef DEVICE_MATRIX_FILE
include $(CLEAR_VARS)
-LOCAL_MODULE := compatibility_matrix.xml
+LOCAL_MODULE := device_compatibility_matrix.xml
+LOCAL_MODULE_STEM := compatibility_matrix.xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)
@@ -69,7 +73,7 @@
# Framework Manifest
include $(CLEAR_VARS)
-LOCAL_MODULE := system_manifest.xml
+LOCAL_MODULE := framework_manifest.xml
LOCAL_MODULE_STEM := manifest.xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT)
@@ -94,7 +98,7 @@
# Framework Compatibility Matrix
include $(CLEAR_VARS)
-LOCAL_MODULE := system_compatibility_matrix.xml
+LOCAL_MODULE := framework_compatibility_matrix.xml
LOCAL_MODULE_STEM := compatibility_matrix.xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT)
@@ -103,12 +107,10 @@
$(GEN): PRIVATE_FLAGS :=
-ifeq ($(PRODUCT_ENFORCE_VINTF_MANIFEST),true)
ifdef BUILT_VENDOR_MANIFEST
$(GEN): $(BUILT_VENDOR_MANIFEST)
$(GEN): PRIVATE_FLAGS += -c "$(BUILT_VENDOR_MANIFEST)"
endif
-endif
ifeq (true,$(BOARD_AVB_ENABLE))
$(GEN): $(AVBTOOL)
@@ -139,12 +141,15 @@
KERNEL_VERSIONS :=
KERNEL_CONFIG_DATA :=
-$(GEN): $(FRAMEWORK_COMPATIBILITY_MATRIX_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
+$(GEN): $(FRAMEWORK_COMPATIBILITY_MATRIX_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
# TODO(b/37405869) (b/37715375) inject avb versions as well for devices that have avb enabled.
POLICYVERS=$(POLICYVERS) \
BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
FRAMEWORK_VBMETA_VERSION=$(FRAMEWORK_VBMETA_VERSION) \
- $(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@ $(PRIVATE_FLAGS)
+ PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
+ $(HOST_OUT_EXECUTABLES)/assemble_vintf \
+ -i $(call normalize-path-list,$(FRAMEWORK_COMPATIBILITY_MATRIX_FILES)) \
+ -o $@ $(PRIVATE_FLAGS)
LOCAL_PREBUILT_MODULE_FILE := $(GEN)
include $(BUILD_PREBUILT)
BUILT_SYSTEM_COMPATIBILITY_MATRIX := $(LOCAL_BUILT_MODULE)
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 246a553..55ee6dc 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -81,10 +81,13 @@
tzdatacheck \
vndservice \
vndservicemanager \
- compatibility_matrix.xml \
- manifest.xml \
- system_manifest.xml \
- system_compatibility_matrix.xml \
+
+# VINTF data
+PRODUCT_PACKAGES += \
+ device_compatibility_matrix.xml \
+ device_manifest.xml \
+ framework_manifest.xml \
+ framework_compatibility_matrix.xml \
# SELinux packages are added as dependencies of the selinux_policy
# phony package.
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index cc946ca..0f33f38 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -131,3 +131,9 @@
PRODUCT_CHARACTERISTICS := emulator
PRODUCT_FULL_TREBLE_OVERRIDE := true
+
+
+# Watchdog triggers a reboot because the location service is not
+# responding; disable it for now.
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+config.disable_location=true
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index e4e4138..2a92d86 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -376,6 +376,40 @@
return None
return base_fs_file
+
+def CheckHeadroom(ext4fs_output, prop_dict):
+ """Checks if there's enough headroom space available.
+
+ Headroom is the reserved space on system image (via PRODUCT_SYSTEM_HEADROOM),
+ which is useful for devices with low disk space that have system image
+ variation between builds. The 'partition_headroom' in prop_dict is the size
+ in bytes, while the numbers in 'ext4fs_output' are for 4K-blocks.
+
+ Args:
+ ext4fs_output: The output string from mke2fs command.
+ prop_dict: The property dict.
+
+ Returns:
+ The check result.
+ """
+ ext4fs_stats = re.compile(
+ r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
+ r'(?P<total_blocks>[0-9]+) blocks')
+ m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
+ used_blocks = int(m.groupdict().get('used_blocks'))
+ total_blocks = int(m.groupdict().get('total_blocks'))
+ headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
+ adjusted_blocks = total_blocks - headroom_blocks
+ if used_blocks > adjusted_blocks:
+ mount_point = prop_dict.get("mount_point")
+ print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
+ "headroom: %d blocks, available: %d blocks)" % (
+ mount_point, total_blocks, used_blocks, headroom_blocks,
+ adjusted_blocks))
+ return False
+ return True
+
+
def BuildImage(in_dir, prop_dict, out_file, target_out=None):
"""Build an image to out_file from in_dir with property prop_dict.
@@ -542,9 +576,7 @@
shutil.rmtree(staging_system, ignore_errors=True)
shutil.copytree(origin_in, staging_system, symlinks=True)
- has_reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
ext4fs_output = None
-
try:
if fs_type.startswith("ext4"):
(ext4fs_output, exit_code) = RunCommand(build_command)
@@ -562,37 +594,10 @@
print("Error: '%s' failed with exit code %d" % (build_command, exit_code))
return False
- # Bug: 21522719, 22023465
- # There are some reserved blocks on ext4 FS (lesser of 4096 blocks and 2%).
- # We need to deduct those blocks from the available space, since they are
- # not writable even with root privilege. It only affects devices using
- # file-based OTA and a kernel version of 3.10 or greater (currently just
- # sprout).
- # Separately, check if there's enough headroom space available. This is useful for
- # devices with low disk space that have system image variation between builds.
- if (has_reserved_blocks or "partition_headroom" in prop_dict) and fs_type.startswith("ext4"):
+ # Check if there's enough headroom space available for ext4 image.
+ if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
assert ext4fs_output is not None
- ext4fs_stats = re.compile(
- r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
- r'(?P<total_blocks>[0-9]+) blocks')
- m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
- used_blocks = int(m.groupdict().get('used_blocks'))
- total_blocks = int(m.groupdict().get('total_blocks'))
- reserved_blocks = 0
- headroom_blocks = 0
- adjusted_blocks = total_blocks
- if has_reserved_blocks:
- reserved_blocks = min(4096, int(total_blocks * 0.02))
- adjusted_blocks -= reserved_blocks
- if "partition_headroom" in prop_dict:
- headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
- adjusted_blocks -= headroom_blocks
- if used_blocks > adjusted_blocks:
- mount_point = prop_dict.get("mount_point")
- print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
- "reserved: %d blocks, headroom: %d blocks, available: %d blocks)" % (
- mount_point, total_blocks, used_blocks, reserved_blocks,
- headroom_blocks, adjusted_blocks))
+ if not CheckHeadroom(ext4fs_output, prop_dict):
return False
if not fs_spans_partition:
@@ -698,7 +703,6 @@
copy_prop("system_root_image", "system_root_image")
copy_prop("ramdisk_dir", "ramdisk_dir")
copy_prop("ramdisk_fs_config", "ramdisk_fs_config")
- copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
copy_prop("system_squashfs_compressor", "squashfs_compressor")
copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
copy_prop("system_squashfs_block_size", "squashfs_block_size")
@@ -718,7 +722,6 @@
copy_prop("system_size", "partition_size")
copy_prop("system_journal_size", "journal_size")
copy_prop("system_verity_block_device", "verity_block_device")
- copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
copy_prop("system_squashfs_compressor", "squashfs_compressor")
copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
copy_prop("system_squashfs_block_size", "squashfs_block_size")
@@ -744,7 +747,6 @@
copy_prop("vendor_size", "partition_size")
copy_prop("vendor_journal_size", "journal_size")
copy_prop("vendor_verity_block_device", "verity_block_device")
- copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
copy_prop("vendor_squashfs_compressor", "squashfs_compressor")
copy_prop("vendor_squashfs_compressor_opt", "squashfs_compressor_opt")
copy_prop("vendor_squashfs_block_size", "squashfs_block_size")
@@ -755,7 +757,6 @@
copy_prop("fs_type", "fs_type")
copy_prop("oem_size", "partition_size")
copy_prop("oem_journal_size", "journal_size")
- copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
copy_prop("oem_extfs_inode_count", "extfs_inode_count")
d["partition_name"] = mount_point
return d
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 8106d06..b5e9d8b 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -21,11 +21,7 @@
from __future__ import print_function
import argparse
-import common
-import os
-import os.path
import re
-import site
import subprocess
import sys
import tempfile
@@ -34,15 +30,7 @@
from hashlib import sha1
from hashlib import sha256
-# 'update_payload' package is under 'system/update_engine/scripts/', which
-# should be included in PYTHONPATH. Try to set it up automatically if
-# if ANDROID_BUILD_TOP is available.
-top = os.getenv('ANDROID_BUILD_TOP')
-if top:
- site.addsitedir(os.path.join(top, 'system', 'update_engine', 'scripts'))
-
-from update_payload.payload import Payload
-from update_payload.update_metadata_pb2 import Signatures
+import common
def CertUsesSha256(cert):
@@ -108,10 +96,7 @@
use_sha256 = CertUsesSha256(cert)
print('Use SHA-256: %s' % (use_sha256,))
- if use_sha256:
- h = sha256()
- else:
- h = sha1()
+ h = sha256() if use_sha256 else sha1()
h.update(package_bytes[:signed_len])
package_digest = h.hexdigest().lower()
@@ -161,40 +146,6 @@
def VerifyAbOtaPayload(cert, package):
"""Verifies the payload and metadata signatures in an A/B OTA payload."""
-
- def VerifySignatureBlob(hash_file, blob):
- """Verifies the input hash_file against the signature blob."""
- signatures = Signatures()
- signatures.ParseFromString(blob)
-
- extracted_sig_file = common.MakeTempFile(
- prefix='extracted-sig-', suffix='.bin')
- # In Android, we only expect one signature.
- assert len(signatures.signatures) == 1, \
- 'Invalid number of signatures: %d' % len(signatures.signatures)
- signature = signatures.signatures[0]
- length = len(signature.data)
- assert length == 256, 'Invalid signature length %d' % (length,)
- with open(extracted_sig_file, 'w') as f:
- f.write(signature.data)
-
- # Verify the signature file extracted from the payload, by reversing the
- # signing operation. Alternatively, this can be done by calling 'openssl
- # rsautl -verify -certin -inkey <cert.pem> -in <extracted_sig_file> -out
- # <output>', then to assert that
- # <output> == SHA-256 DigestInfo prefix || <hash_file>.
- cmd = ['openssl', 'pkeyutl', '-verify', '-certin', '-inkey', cert,
- '-pkeyopt', 'digest:sha256', '-in', hash_file,
- '-sigfile', extracted_sig_file]
- p = common.Run(cmd, stdout=subprocess.PIPE)
- result, _ = p.communicate()
-
- # https://github.com/openssl/openssl/pull/3213
- # 'openssl pkeyutl -verify' (prior to 1.1.0) returns non-zero return code,
- # even on successful verification. To avoid the false alarm with older
- # openssl, check the output directly.
- assert result.strip() == 'Signature Verified Successfully', result.strip()
-
package_zip = zipfile.ZipFile(package, 'r')
if 'payload.bin' not in package_zip.namelist():
common.ZipClose(package_zip)
@@ -202,37 +153,27 @@
print('Verifying A/B OTA payload signatures...')
+ # Dump pubkey from the certificate.
+ pubkey = common.MakeTempFile(prefix="key-", suffix=".key")
+ cmd = ['openssl', 'x509', '-pubkey', '-noout', '-in', cert, '-out', pubkey]
+ proc = common.Run(cmd, stdout=subprocess.PIPE)
+ stdoutdata, _ = proc.communicate()
+ assert proc.returncode == 0, \
+ 'Failed to dump public key from certificate: %s\n%s' % (cert, stdoutdata)
+
package_dir = tempfile.mkdtemp(prefix='package-')
common.OPTIONS.tempfiles.append(package_dir)
+ # Signature verification with delta_generator.
payload_file = package_zip.extract('payload.bin', package_dir)
- payload = Payload(open(payload_file, 'rb'))
- payload.Init()
-
- # Extract the payload hash and metadata hash from the payload.bin.
- payload_hash_file = common.MakeTempFile(prefix='hash-', suffix='.bin')
- metadata_hash_file = common.MakeTempFile(prefix='hash-', suffix='.bin')
- cmd = ['brillo_update_payload', 'hash',
- '--unsigned_payload', payload_file,
- '--signature_size', '256',
- '--metadata_hash_file', metadata_hash_file,
- '--payload_hash_file', payload_hash_file]
- p = common.Run(cmd, stdout=subprocess.PIPE)
- p.communicate()
- assert p.returncode == 0, 'brillo_update_payload hash failed'
-
- # Payload signature verification.
- assert payload.manifest.HasField('signatures_offset')
- payload_signature = payload.ReadDataBlob(
- payload.manifest.signatures_offset, payload.manifest.signatures_size)
- VerifySignatureBlob(payload_hash_file, payload_signature)
-
- # Metadata signature verification.
- metadata_signature = payload.ReadDataBlob(
- -payload.header.metadata_signature_len,
- payload.header.metadata_signature_len)
- VerifySignatureBlob(metadata_hash_file, metadata_signature)
-
+ cmd = ['delta_generator',
+ '--in_file=' + payload_file,
+ '--public_key=' + pubkey]
+ proc = common.Run(cmd, stdout=subprocess.PIPE)
+ stdoutdata, _ = proc.communicate()
+ assert proc.returncode == 0, \
+ 'Failed to verify payload with delta_generator: %s\n%s' % (package,
+ stdoutdata)
common.ZipClose(package_zip)
# Verified successfully upon reaching here.
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 07037f1..60e2e5c 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -109,9 +109,6 @@
Specifies the threshold that will be used to compute the maximum
allowed stash size (defaults to 0.8).
- --gen_verify
- Generate an OTA package that verifies the partitions.
-
--log_diff <file>
Generate a log file that shows the differences in the source and target
builds for an incremental package. This option is only meaningful when
@@ -172,7 +169,6 @@
# Stash size cannot exceed cache_size * threshold.
OPTIONS.cache_size = None
OPTIONS.stash_threshold = 0.8
-OPTIONS.gen_verify = False
OPTIONS.log_diff = None
OPTIONS.payload_signer = None
OPTIONS.payload_signer_args = []
@@ -936,78 +932,6 @@
WriteMetadata(metadata, output_zip)
-def WriteVerifyPackage(input_zip, output_zip):
- script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
-
- oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
- recovery_mount_options = OPTIONS.info_dict.get(
- "recovery_mount_options")
- oem_dicts = None
- if oem_props:
- oem_dicts = _LoadOemDicts(script, recovery_mount_options)
-
- target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
- OPTIONS.info_dict)
- metadata = {
- "post-build": target_fp,
- "pre-device": GetOemProperty("ro.product.device", oem_props,
- oem_dicts and oem_dicts[0],
- OPTIONS.info_dict),
- "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
- }
-
- device_specific = common.DeviceSpecificParams(
- input_zip=input_zip,
- input_version=OPTIONS.info_dict["recovery_api_version"],
- output_zip=output_zip,
- script=script,
- input_tmp=OPTIONS.input_tmp,
- metadata=metadata,
- info_dict=OPTIONS.info_dict)
-
- AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
-
- script.Print("Verifying device images against %s..." % target_fp)
- script.AppendExtra("")
-
- script.Print("Verifying boot...")
- boot_img = common.GetBootableImage(
- "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
- boot_type, boot_device = common.GetTypeAndDevice(
- "/boot", OPTIONS.info_dict)
- script.Verify("%s:%s:%d:%s" % (
- boot_type, boot_device, boot_img.size, boot_img.sha1))
- script.AppendExtra("")
-
- script.Print("Verifying recovery...")
- recovery_img = common.GetBootableImage(
- "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
- recovery_type, recovery_device = common.GetTypeAndDevice(
- "/recovery", OPTIONS.info_dict)
- script.Verify("%s:%s:%d:%s" % (
- recovery_type, recovery_device, recovery_img.size, recovery_img.sha1))
- script.AppendExtra("")
-
- system_tgt = GetImage("system", OPTIONS.input_tmp)
- system_tgt.ResetFileMap()
- system_diff = common.BlockDifference("system", system_tgt, src=None)
- system_diff.WriteStrictVerifyScript(script)
-
- if HasVendorPartition(input_zip):
- vendor_tgt = GetImage("vendor", OPTIONS.input_tmp)
- vendor_tgt.ResetFileMap()
- vendor_diff = common.BlockDifference("vendor", vendor_tgt, src=None)
- vendor_diff.WriteStrictVerifyScript(script)
-
- # Device specific partitions, such as radio, bootloader and etc.
- device_specific.VerifyOTA_Assertions()
-
- script.SetProgress(1.0)
- script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
- metadata["ota-required-cache"] = str(script.required_cache)
- WriteMetadata(metadata, output_zip)
-
-
def WriteABOTAPackageWithBrilloScript(target_file, output_file,
source_file=None):
"""Generate an Android OTA package that has A/B update payload."""
@@ -1320,8 +1244,6 @@
except ValueError:
raise ValueError("Cannot parse value %r for option %r - expecting "
"a float" % (a, o))
- elif o == "--gen_verify":
- OPTIONS.gen_verify = True
elif o == "--log_diff":
OPTIONS.log_diff = a
elif o == "--payload_signer":
@@ -1355,7 +1277,6 @@
"verify",
"no_fallback_to_full",
"stash_threshold=",
- "gen_verify",
"log_diff=",
"payload_signer=",
"payload_signer_args=",
@@ -1484,12 +1405,8 @@
print("--- can't determine the cache partition size ---")
OPTIONS.cache_size = cache_size
- # Generate a verify package.
- if OPTIONS.gen_verify:
- WriteVerifyPackage(input_zip, output_zip)
-
# Generate a full OTA.
- elif OPTIONS.incremental_source is None:
+ if OPTIONS.incremental_source is None:
WriteFullOTAPackage(input_zip, output_zip)
# Generate an incremental OTA. It will fall back to generate a full OTA on
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index f559b29..7bfc04b 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -278,7 +278,7 @@
if stat.S_ISLNK(info.external_attr >> 16):
new_data = data
else:
- new_data = RewriteProps(data, misc_info)
+ new_data = RewriteProps(data)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
elif info.filename.endswith("mac_permissions.xml"):
@@ -385,8 +385,14 @@
def EditTags(tags):
- """Given a string containing comma-separated tags, apply the edits
- specified in OPTIONS.tag_changes and return the updated string."""
+ """Applies the edits to the tag string as specified in OPTIONS.tag_changes.
+
+ Args:
+ tags: The input string that contains comma-separated tags.
+
+ Returns:
+ The updated tags (comma-separated and sorted).
+ """
tags = set(tags.split(","))
for ch in OPTIONS.tag_changes:
if ch[0] == "-":
@@ -396,20 +402,27 @@
return ",".join(sorted(tags))
-def RewriteProps(data, misc_info):
+def RewriteProps(data):
+ """Rewrites the system properties in the given string.
+
+ Each property is expected in 'key=value' format. The properties that contain
+ build tags (i.e. test-keys, dev-keys) will be updated accordingly by calling
+ EditTags().
+
+ Args:
+ data: Input string, separated by newlines.
+
+ Returns:
+ The string with modified properties.
+ """
output = []
for line in data.split("\n"):
line = line.strip()
original_line = line
if line and line[0] != '#' and "=" in line:
key, value = line.split("=", 1)
- if (key in ("ro.build.fingerprint", "ro.vendor.build.fingerprint")
- and misc_info.get("oem_fingerprint_properties") is None):
- pieces = value.split("/")
- pieces[-1] = EditTags(pieces[-1])
- value = "/".join(pieces)
- elif (key in ("ro.build.thumbprint", "ro.vendor.build.thumbprint")
- and misc_info.get("oem_fingerprint_properties") is not None):
+ if key in ("ro.build.fingerprint", "ro.build.thumbprint",
+ "ro.vendor.build.fingerprint", "ro.vendor.build.thumbprint"):
pieces = value.split("/")
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py
new file mode 100644
index 0000000..6566a5a
--- /dev/null
+++ b/tools/releasetools/test_build_image.py
@@ -0,0 +1,65 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import shutil
+import tempfile
+import unittest
+
+from build_image import CheckHeadroom, RunCommand
+
+
+class BuildImageTest(unittest.TestCase):
+
+ def test_CheckHeadroom_SizeUnderLimit(self):
+ ext4fs_output = ("Created filesystem with 2777/129024 inodes and "
+ "508140/516099 blocks")
+ prop_dict = {
+ 'partition_headroom' : '4194304',
+ 'mount_point' : 'system',
+ }
+ self.assertTrue(CheckHeadroom(ext4fs_output, prop_dict))
+
+ def test_CheckHeadroom_InsufficientHeadroom(self):
+ ext4fs_output = ("Created filesystem with 2777/129024 inodes and "
+ "515099/516099 blocks")
+ prop_dict = {
+ 'partition_headroom' : '4100096',
+ 'mount_point' : 'system',
+ }
+ self.assertFalse(CheckHeadroom(ext4fs_output, prop_dict))
+
+ def test_CheckHeadroom_WithMke2fsOutput(self):
+ """Tests the result parsing from actual call to mke2fs."""
+ input_dir = tempfile.mkdtemp()
+ output_image = tempfile.NamedTemporaryFile(suffix='.img')
+ command = ['mkuserimg_mke2fs.sh', input_dir, output_image.name, 'ext4',
+ '/system', '409600', '-j', '0']
+ ext4fs_output, exit_code = RunCommand(command)
+ self.assertEqual(0, exit_code)
+
+ prop_dict = {
+ 'partition_headroom' : '40960',
+ 'mount_point' : 'system',
+ }
+ self.assertTrue(CheckHeadroom(ext4fs_output, prop_dict))
+
+ prop_dict = {
+ 'partition_headroom' : '413696',
+ 'mount_point' : 'system',
+ }
+ self.assertFalse(CheckHeadroom(ext4fs_output, prop_dict))
+
+ shutil.rmtree(input_dir)
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
new file mode 100644
index 0000000..90afdc7
--- /dev/null
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -0,0 +1,67 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import print_function
+
+import unittest
+
+from sign_target_files_apks import EditTags, RewriteProps
+
+
+class SignTargetFilesApksTest(unittest.TestCase):
+
+ def test_EditTags(self):
+ self.assertEqual(EditTags('dev-keys'), ('release-keys'))
+ self.assertEqual(EditTags('test-keys'), ('release-keys'))
+
+ # Multiple tags.
+ self.assertEqual(EditTags('abc,dev-keys,xyz'), ('abc,release-keys,xyz'))
+
+ # Tags are sorted.
+ self.assertEqual(EditTags('xyz,abc,dev-keys,xyz'), ('abc,release-keys,xyz'))
+
+ def test_RewriteProps(self):
+ props = (
+ ('', '\n'),
+ ('ro.build.fingerprint=foo/bar/dev-keys',
+ 'ro.build.fingerprint=foo/bar/release-keys\n'),
+ ('ro.build.thumbprint=foo/bar/dev-keys',
+ 'ro.build.thumbprint=foo/bar/release-keys\n'),
+ ('ro.vendor.build.fingerprint=foo/bar/dev-keys',
+ 'ro.vendor.build.fingerprint=foo/bar/release-keys\n'),
+ ('ro.vendor.build.thumbprint=foo/bar/dev-keys',
+ 'ro.vendor.build.thumbprint=foo/bar/release-keys\n'),
+ ('# comment line 1', '# comment line 1\n'),
+ ('ro.bootimage.build.fingerprint=foo/bar/dev-keys',
+ 'ro.bootimage.build.fingerprint=foo/bar/release-keys\n'),
+ ('ro.build.description='
+ 'sailfish-user 8.0.0 OPR6.170623.012 4283428 dev-keys',
+ 'ro.build.description='
+ 'sailfish-user 8.0.0 OPR6.170623.012 4283428 release-keys\n'),
+ ('ro.build.tags=dev-keys', 'ro.build.tags=release-keys\n'),
+ ('# comment line 2', '# comment line 2\n'),
+ ('ro.build.display.id=OPR6.170623.012 dev-keys',
+ 'ro.build.display.id=OPR6.170623.012\n'),
+ ('# comment line 3', '# comment line 3\n'),
+ )
+
+ # Assert the case for each individual line.
+ for input, output in props:
+ self.assertEqual(RewriteProps(input), output)
+
+ # Concatenate all the input lines.
+ self.assertEqual(RewriteProps('\n'.join([prop[0] for prop in props])),
+ ''.join([prop[1] for prop in props]))