am dae9bb50: am 64f0f2ad: am f7cb5fc2: am 1c2ca593: am 289c9563: am 6be36e0c: am 7447c516: am 89a0e52e: am 01b35afe: am a9c86ef0: am f7bb2237: am f2ac0682: am 7eba7cc4: am fe3eeb8c: am c5ec654c: am c646bbfb: We shouldn't use a temporary variable in the build recipe.
* commit 'dae9bb50a5e733221a23c8d7458165cdf34028fc':
We shouldn't use a temporary variable in the build recipe.
diff --git a/CleanSpec.mk b/CleanSpec.mk
index a1dec2e..6e7c9b0 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -299,11 +299,15 @@
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
-# 5.0.1
+# API 22!
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
-# 5.0.2
+# 5.1!
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/core/Makefile b/core/Makefile
index d8d3044..15c7f02 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -200,6 +200,7 @@
echo "import /oem/oem.prop $(prop)" >> $@;)
endif
$(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
+ TARGET_BUILD_FLAVOR="$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)" \
TARGET_DEVICE="$(TARGET_DEVICE)" \
PRODUCT_NAME="$(TARGET_PRODUCT)" \
PRODUCT_BRAND="$(PRODUCT_BRAND)" \
@@ -491,6 +492,11 @@
tmp_dir_for_image := $(call intermediates-dir-for,EXECUTABLES,boot_img)/bootimg
INTERNAL_BOOTIMAGE_ARGS += --tmpdir $(tmp_dir_for_image)
INTERNAL_BOOTIMAGE_ARGS += --genext2fs $(MKEXT2IMG)
+
+ifeq ($(TARGET_BOOTIMAGE_USE_EXTLINUX),true)
+INTERNAL_BOOTIMAGE_ARGS += --extlinuxconf $(TARGET_BOOTIMAGE_EXTLINUX_CONFIG)
+endif
+
$(INSTALLED_BOOTIMAGE_TARGET): $(MKEXT2IMG) $(INTERNAL_BOOTIMAGE_FILES)
$(call pretty,"Target boot image: $@")
$(hide) $(MKEXT2BOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) --output $@
@@ -505,14 +511,14 @@
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
$(call pretty,"Target boot image: $@")
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $@
+ $(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
@echo "make $@: ignoring dependencies"
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
- $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $(INSTALLED_BOOTIMAGE_TARGET)
+ $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET)
$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
else # PRODUCT_SUPPORTS_VERITY != true
@@ -752,12 +758,8 @@
# If PRODUCT_AAPT_PREF_CONFIG includes a dpi bucket, then use that value.
recovery_density := $(filter %dpi,$(PRODUCT_AAPT_PREF_CONFIG))
else
-# Otherwise, use the highest density that appears in PRODUCT_AAPT_CONFIG.
-# Order is important here; we'll take the first one that's found.
-recovery_densities := $(filter $(PRODUCT_AAPT_CONFIG_SP),xxxhdpi xxhdpi xhdpi hdpi tvdpi mdpi ldpi)
-ifneq (,$(recovery_densities))
-recovery_density := $(word 1,$(recovery_densities))
-endif
+# Otherwise, use the default medium density.
+recovery_density := mdpi
endif
ifneq (,$(wildcard $(recovery_resources_common)-$(recovery_density)))
@@ -867,7 +869,7 @@
$(hide) $(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
$(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY))
- $(BOOT_SIGNER) /recovery $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $@
+ $(BOOT_SIGNER) /recovery $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
endif
$(hide) $(call assert-max-image-size,$@,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))
@echo ----- Made recovery image: $@ --------
@@ -1240,6 +1242,12 @@
endif # BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
# -----------------------------------------------------------------
+# bring in the installer image generation defines if necessary
+ifeq ($(TARGET_USE_DISKINSTALLER),true)
+include bootable/diskinstaller/config.mk
+endif
+
+# -----------------------------------------------------------------
# host tools needed to build dist and OTA packages
DISTTOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \
@@ -1421,6 +1429,7 @@
$(hide) echo "use_set_metadata=1" >> $(zip_root)/META/misc_info.txt
$(hide) echo "multistage_support=1" >> $(zip_root)/META/misc_info.txt
$(hide) echo "update_rename_support=1" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "blockimgdiff_versions=1,2" >> $(zip_root)/META/misc_info.txt
ifneq ($(OEM_THUMBPRINT_PROPERTIES),)
# OTA scripts are only interested in fingerprint related properties
$(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 21b95c2..c641b75 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -11,13 +11,27 @@
full_android_manifest := $(LOCAL_PATH)/$(LOCAL_MANIFEST_FILE)
endif
+my_full_libs_manifest_files := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
+my_full_libs_manifest_deps := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
+
+# Set up dependency on aar libraries
+ifdef LOCAL_STATIC_JAVA_AAR_LIBRARIES
+my_full_libs_manifest_deps += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/classes.jar)
+my_full_libs_manifest_files += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/AndroidManifest.xml)
+
+LOCAL_RESOURCE_DIR += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/res)
+endif
+
# Set up rules to merge library manifest files
-ifdef LOCAL_FULL_LIBS_MANIFEST_FILES
+ifdef my_full_libs_manifest_files
main_android_manifest := $(full_android_manifest)
full_android_manifest := $(intermediates.COMMON)/AndroidManifest.xml
-$(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
-$(full_android_manifest) : $(main_android_manifest) $(LOCAL_FULL_LIBS_MANIFEST_FILES)
- @echo "Merge android manifest files: $@ <-- $^"
+$(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(my_full_libs_manifest_files)
+$(full_android_manifest) : $(main_android_manifest) $(my_full_libs_manifest_deps)
+ @echo "Merge android manifest files: $@ <-- $< $(PRIVATE_LIBS_MANIFESTS)"
@mkdir -p $(dir $@)
$(hide) $(ANDROID_MANIFEST_MERGER) --main $< --libs $(PRIVATE_LIBS_MANIFESTS) \
--out $@
diff --git a/core/apicheck_msg_current.txt b/core/apicheck_msg_current.txt
index 9abd381..440e7f8 100644
--- a/core/apicheck_msg_current.txt
+++ b/core/apicheck_msg_current.txt
@@ -7,7 +7,7 @@
errors above.
2) You can update current.txt by executing the following command:
- make %UPDATE_API%
+ make update-api
To submit the revised current.txt to the main Android repository,
you will need approval.
diff --git a/core/binary.mk b/core/binary.mk
index d339317..1e313ff 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -184,9 +184,11 @@
my_compiler_dependencies :=
-####################################################
+##################################################################
## Add FDO flags if FDO is turned on and supported
-####################################################
+## Please note that we will do option filtering during FDO build.
+## i.e. -Os -> -O2, remove -fno-early-inlining and -finline-limit.
+##################################################################
ifeq ($(strip $(LOCAL_FDO_SUPPORT)), true)
ifeq ($(strip $(LOCAL_IS_HOST_MODULE)),)
my_cflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_CFLAGS)
@@ -923,6 +925,21 @@
my_ldflags := $(call $(LOCAL_2ND_ARCH_VAR_PREFIX)convert-to-$(my_host)clang-flags,$(my_ldflags))
endif
+ifeq ($(LOCAL_FDO_SUPPORT), true)
+ build_with_fdo := false
+ ifeq ($(BUILD_FDO_INSTRUMENT), true)
+ build_with_fdo := true
+ endif
+ ifeq ($(BUILD_FDO_OPTIMIZE), true)
+ build_with_fdo := true
+ endif
+ ifeq ($(build_with_fdo), true)
+ my_cflags := $(patsubst -Os,-O2,$(my_cflags))
+ fdo_incompatible_flags=-fno-early-inlining -finline-limit=%
+ my_cflags := $(filter-out $(fdo_incompatible_flags),$(my_cflags))
+ endif
+endif
+
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_YACCFLAGS := $(LOCAL_YACCFLAGS)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASFLAGS := $(my_asflags)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CONLYFLAGS := $(LOCAL_CONLYFLAGS)
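
The new FDO block above rewrites the compiler flags before they reach the rule: -Os is promoted to -O2 and the inlining-limiting flags are dropped. A rough Python equivalent of that filtering, assuming cflags is a plain list of flag strings (filter_fdo_cflags is illustrative only):

    # Illustrative only: the same -Os -> -O2 promotion and flag filtering that
    # the FDO block above performs on my_cflags.
    def filter_fdo_cflags(cflags):
        out = []
        for flag in cflags:
            if flag == "-Os":
                out.append("-O2")
            elif flag == "-fno-early-inlining" or flag.startswith("-finline-limit="):
                continue  # incompatible with FDO, drop it
            else:
                out.append(flag)
        return out

    print(filter_fdo_cflags(["-Os", "-fno-early-inlining", "-finline-limit=64", "-Wall"]))
    # ['-O2', '-Wall']
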
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index 1bada38..cf980d8 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -132,23 +132,11 @@
previous_build_config_file := $(PRODUCT_OUT)/previous_build_config.mk
-# TODO: this special case for the sdk is only necessary while "sdk"
-# is a valid make target. Eventually, it will just be a product, at
-# which point TARGET_PRODUCT will handle it and we can avoid this check
-# of MAKECMDGOALS. The "addprefix" is just to keep things pretty.
-ifneq ($(TARGET_PRODUCT),sdk)
- building_sdk := $(addprefix -,$(filter sdk,$(MAKECMDGOALS)))
-else
- # Don't bother with this extra part when explicitly building the sdk product.
- building_sdk :=
-endif
-
# A change in the list of aapt configs warrants an installclean, too.
aapt_config_list := $(strip $(PRODUCT_AAPT_CONFIG) $(PRODUCT_AAPT_PREF_CONFIG))
current_build_config := \
- $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)$(building_sdk)-{$(aapt_config_list)}
-building_sdk :=
+ $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)-{$(aapt_config_list)}
aapt_config_list :=
force_installclean := false
@@ -220,6 +208,7 @@
$(PRODUCT_OUT)/obj/JAVA_LIBRARIES \
$(PRODUCT_OUT)/obj/FAKE \
$(PRODUCT_OUT)/obj/EXECUTABLES/adbd_intermediates \
+ $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libfs_mgr_intermediates \
$(PRODUCT_OUT)/obj/EXECUTABLES/init_intermediates \
$(PRODUCT_OUT)/obj/ETC/mac_permissions.xml_intermediates \
$(PRODUCT_OUT)/obj/ETC/sepolicy_intermediates \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 92b3fc4..bf81ebf 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -30,6 +30,7 @@
LOCAL_SRC_FILES:=
LOCAL_PREBUILT_OBJ_FILES:=
LOCAL_STATIC_JAVA_LIBRARIES:=
+LOCAL_STATIC_JAVA_AAR_LIBRARIES:=
LOCAL_STATIC_LIBRARIES:=
# Group static libraries with "-Wl,--start-group" and "-Wl,--end-group" when linking.
LOCAL_GROUP_STATIC_LIBRARIES:=
@@ -158,6 +159,8 @@
LOCAL_MODULE_UNSUPPORTED_TARGET_ARCH:=
LOCAL_MODULE_UNSUPPORTED_TARGET_ARCH_WARN:=
LOCAL_MODULE_HOST_ARCH:=
+LOCAL_DPI_VARIANTS:=
+LOCAL_DPI_FILE_STEM:=
# arch specific variables
LOCAL_SRC_FILES_$(TARGET_ARCH):=
diff --git a/core/combo/HOST_darwin-x86.mk b/core/combo/HOST_darwin-x86.mk
index 4a2bfe3..ec37993 100644
--- a/core/combo/HOST_darwin-x86.mk
+++ b/core/combo/HOST_darwin-x86.mk
@@ -37,8 +37,8 @@
ifneq (,$(strip $(wildcard $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)-gcc)))
$(combo_2nd_arch_prefix)HOST_CC := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)-gcc
$(combo_2nd_arch_prefix)HOST_CXX := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)-g++
-ifeq ($(mac_sdk_version),10.8)
-# Mac SDK 10.8 no longer has stdarg.h, etc
+ifneq ($(filter 10.8 10.9, $(mac_sdk_version)),)
+# Mac SDK 10.8+ no longer has stdarg.h, etc
host_toolchain_header := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_ROOT)/lib/gcc/i686-apple-darwin$(gcc_darwin_version)/4.2.1/include
$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -isystem $(host_toolchain_header)
endif
diff --git a/core/combo/HOST_darwin-x86_64.mk b/core/combo/HOST_darwin-x86_64.mk
index 0bc0227..a776a69 100644
--- a/core/combo/HOST_darwin-x86_64.mk
+++ b/core/combo/HOST_darwin-x86_64.mk
@@ -37,8 +37,8 @@
ifneq (,$(strip $(wildcard $(HOST_TOOLCHAIN_PREFIX)-gcc)))
HOST_CC := $(HOST_TOOLCHAIN_PREFIX)-gcc
HOST_CXX := $(HOST_TOOLCHAIN_PREFIX)-g++
-ifeq ($(mac_sdk_version),10.8)
-# Mac SDK 10.8 no longer has stdarg.h, etc
+ifneq ($(filter 10.8 10.9, $(mac_sdk_version)),)
+# Mac SDK 10.8+ no longer has stdarg.h, etc
host_toolchain_header := $(HOST_TOOLCHAIN_ROOT)/lib/gcc/i686-apple-darwin$(gcc_darwin_version)/4.2.1/include
HOST_GLOBAL_CFLAGS += -isystem $(host_toolchain_header)
endif
diff --git a/core/combo/HOST_windows-x86.mk b/core/combo/HOST_windows-x86.mk
index fdb72a7..00e1974 100644
--- a/core/combo/HOST_windows-x86.mk
+++ b/core/combo/HOST_windows-x86.mk
@@ -27,7 +27,7 @@
ifneq ($(strip $(USE_MINGW)),)
HOST_ACP_UNAVAILABLE := true
TOOLS_EXE_SUFFIX :=
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -DUSE_MINGW
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -DUSE_MINGW -DWIN32_LEAN_AND_MEAN
$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -Wno-unused-parameter
$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += --sysroot=prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -m32
diff --git a/core/combo/mac_version.mk b/core/combo/mac_version.mk
index b49feee..6defba7 100644
--- a/core/combo/mac_version.mk
+++ b/core/combo/mac_version.mk
@@ -9,7 +9,7 @@
build_mac_version := $(shell sw_vers -productVersion)
-mac_sdk_versions_supported := 10.6 10.7 10.8
+mac_sdk_versions_supported := 10.6 10.7 10.8 10.9
ifneq ($(strip $(MAC_SDK_VERSION)),)
mac_sdk_version := $(MAC_SDK_VERSION)
ifeq ($(filter $(mac_sdk_version),$(mac_sdk_versions_supported)),)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index de995b7..cfbf221 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -16,7 +16,9 @@
DEX2OATD_DEPENDENCY := $(DEX2OAT_DEPENDENCY)
DEX2OATD_DEPENDENCY += $(DEX2OATD)
-PRELOADED_CLASSES := frameworks/base/preloaded-classes
+# Use the first preloaded-classes file in PRODUCT_COPY_FILES.
+PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
+ $(filter %system/etc/preloaded-classes,$(PRODUCT_COPY_FILES))))
# Use the first compiled-classes file in PRODUCT_COPY_FILES.
COMPILED_CLASSES := $(call word-colon,1,$(firstword \
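
PRODUCT_COPY_FILES holds "src:dest" pairs, so the new PRELOADED_CLASSES definition takes the source half of the first pair whose destination is system/etc/preloaded-classes (base.mk later in this change adds exactly such a pair). A small Python sketch of the same selection, using a hypothetical find_preloaded_classes helper:

    # Illustrative only: pick the source of the first "src:dest" pair whose
    # destination is system/etc/preloaded-classes.
    def find_preloaded_classes(product_copy_files):
        for entry in product_copy_files:
            src, _, dest = entry.partition(":")
            if dest.endswith("system/etc/preloaded-classes"):
                return src
        return None

    copies = [
        "device/sample/compiled-classes:system/etc/compiled-classes",
        "frameworks/base/preloaded-classes:system/etc/preloaded-classes",
    ]
    print(find_preloaded_classes(copies))  # frameworks/base/preloaded-classes
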
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
new file mode 100644
index 0000000..0e8cbdf
--- /dev/null
+++ b/core/dpi_specific_apk.mk
@@ -0,0 +1,70 @@
+# Set up rules to build a dpi-specific apk, reusing everything else from the base apk.
+# Input variables: my_dpi, plus all the other variables set up in package_internal.mk.
+#
+
+dpi_apk_name := $(LOCAL_MODULE)_$(my_dpi)
+dpi_intermediate := $(call intermediates-dir-for,APPS,$(dpi_apk_name))
+built_dpi_apk := $(dpi_intermediate)/package.apk
+
+# Set up all the target-specific variables.
+$(built_dpi_apk): PRIVATE_MODULE := $(dpi_apk_name)
+$(built_dpi_apk): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) $(PRODUCT_AAPT_FLAGS) $($(LOCAL_PACKAGE_NAME)_aapt_flags_$(my_dpi))
+# Clear PRIVATE_PRODUCT_AAPT_CONFIG to include everything by default.
+$(built_dpi_apk): PRIVATE_PRODUCT_AAPT_CONFIG :=
+$(built_dpi_apk): PRIVATE_PRODUCT_AAPT_PREF_CONFIG := $(my_dpi)
+$(built_dpi_apk): PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
+$(built_dpi_apk): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
+$(built_dpi_apk): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
+$(built_dpi_apk): PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
+ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
+$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
+else
+$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
+endif
+$(built_dpi_apk): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
+$(built_dpi_apk): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_INSTRUMENTATION_FOR)
+$(built_dpi_apk): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
+$(built_dpi_apk): PRIVATE_JNI_SHARED_LIBRARIES_ABI := $(jni_shared_libraries_abis)
+$(built_dpi_apk): PRIVATE_DEX_FILE := $(built_dex)
+# Note that PRIVATE_CLASS_INTERMEDIATES_DIR points to the base apk's intermediate dir.
+$(built_dpi_apk): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates)/classes
+$(built_dpi_apk): PRIVATE_EXTRA_JAR_ARGS := $(extra_jar_args)
+$(built_dpi_apk): PRIVATE_PRIVATE_KEY := $(private_key)
+$(built_dpi_apk): PRIVATE_CERTIFICATE := $(certificate)
+$(built_dpi_apk): PRIVATE_ADDITIONAL_CERTIFICATES := $(foreach c,\
+ $(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
+
+# Set up dependencies and the build recipe.
+$(built_dpi_apk) : $(R_file_stamp)
+$(built_dpi_apk) : $(all_library_res_package_export_deps)
+$(built_dpi_apk) : $(built_dex)
+$(built_dpi_apk) : $(private_key) $(certificate) $(SIGNAPK_JAR)
+$(built_dpi_apk) : $(AAPT) | $(ZIPALIGN)
+$(built_dpi_apk) : $(all_res_assets) $(jni_shared_libraries) $(full_android_manifest)
+ @echo "target Package: $(PRIVATE_MODULE) ($@)"
+ $(create-empty-package)
+ $(add-assets-to-package)
+ifneq ($(jni_shared_libraries),)
+ $(add-jni-shared-libs-to-package)
+endif
+ifneq ($(full_classes_jar),)
+ $(add-dex-to-package)
+endif
+ $(add-carried-java-resources)
+ifneq ($(extra_jar_args),)
+ $(add-java-resources-to-package)
+endif
+ $(sign-package)
+ $(align-package)
+
+# Set up global variables to register this apk to the higher-level dependency graph.
+ALL_MODULES += $(dpi_apk_name)
+ALL_MODULES.$(dpi_apk_name).CLASS := APPS
+ALL_MODULES.$(dpi_apk_name).BUILT := $(built_dpi_apk)
+PACKAGES := $(PACKAGES) $(dpi_apk_name)
+PACKAGES.$(dpi_apk_name).PRIVATE_KEY := $(private_key)
+PACKAGES.$(dpi_apk_name).CERTIFICATE := $(certificate)
+
+# Phony targets used by "apps_only".
+.PHONY: $(dpi_apk_name)
+$(dpi_apk_name) : $(built_dpi_apk)
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 3d6ad4a..08fb176 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -17,16 +17,13 @@
# know its results before base_rules.mk is included.
include $(BUILD_SYSTEM)/configure_module_stem.mk
-# base_rules.make defines $(intermediates), but we need its value
-# before we include base_rules. Make a guess, and verify that
-# it's correct once the real value is defined.
-guessed_intermediates := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX))
+intermediates := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX))
# Define the target that is the unmodified output of the linker.
# The basename of this target must be the same as the final output
# binary name, because it's used to set the "soname" in the binary.
# The includer of this file will define a rule to build this target.
-linked_module := $(guessed_intermediates)/LINKED/$(my_built_module_stem)
+linked_module := $(intermediates)/LINKED/$(my_built_module_stem)
ALL_ORIGINAL_DYNAMIC_BINARIES += $(linked_module)
@@ -41,11 +38,6 @@
include $(BUILD_SYSTEM)/binary.mk
###################################
-# Make sure that our guess at the value of intermediates was correct.
-ifneq ($(intermediates),$(guessed_intermediates))
-$(error Internal error: guessed path '$(guessed_intermediates)' doesn't match '$(intermediates))
-endif
-
###########################################################
## Compress
###########################################################
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 944420b..c28bb15 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -100,9 +100,15 @@
my_prebuilt_jni_libs := $(addprefix $(LOCAL_PATH)/, \
$(filter-out @%, $(my_prebuilt_jni_libs)))
ifdef my_prebuilt_jni_libs
+ifdef my_embed_jni
+# Embed my_prebuilt_jni_libs into the apk
+my_jni_shared_libraries += $(my_prebuilt_jni_libs)
+else # not my_embed_jni
+# Install my_prebuilt_jni_libs as separate files.
$(foreach lib, $(my_prebuilt_jni_libs), \
$(eval $(call copy-one-file, $(lib), $(my_app_lib_path)/$(notdir $(lib)))))
$(LOCAL_INSTALLED_MODULE) : | $(addprefix $(my_app_lib_path)/, $(notdir $(my_prebuilt_jni_libs)))
+endif # my_embed_jni
endif # inner my_prebuilt_jni_libs
endif # outer my_prebuilt_jni_libs
diff --git a/core/java.mk b/core/java.mk
index debdf53..a969254 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -52,6 +52,9 @@
endif
endif
+# LOCAL_STATIC_JAVA_AAR_LIBRARIES are a special kind of LOCAL_STATIC_JAVA_LIBRARIES.
+LOCAL_STATIC_JAVA_LIBRARIES := $(strip $(LOCAL_STATIC_JAVA_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES))
+
LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
LOCAL_BUILT_MODULE_STEM := $(strip $(LOCAL_BUILT_MODULE_STEM))
diff --git a/core/package_internal.mk b/core/package_internal.mk
index bb458d4..a6ad354 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -123,7 +123,8 @@
all_res_assets := $(strip $(all_assets) $(all_resources))
-package_expected_intermediates_COMMON := $(call local-intermediates-dir,COMMON)
+intermediates.COMMON := $(call local-intermediates-dir,COMMON)
+
# If no assets or resources were found, clear the directory variables so
# we don't try to build them.
ifneq (true,$(need_compile_asset))
@@ -136,7 +137,7 @@
# Make sure that R_file_stamp inherits the proper PRIVATE vars.
# If R.stamp moves, be sure to update the framework makefile,
# which has intimate knowledge of its location.
-R_file_stamp := $(package_expected_intermediates_COMMON)/src/R.stamp
+R_file_stamp := $(intermediates.COMMON)/src/R.stamp
LOCAL_INTERMEDIATE_TARGETS += $(R_file_stamp)
endif
@@ -156,7 +157,7 @@
proguard_options_file :=
ifneq ($(LOCAL_PROGUARD_ENABLED),custom)
ifeq ($(need_compile_res),true)
- proguard_options_file := $(package_expected_intermediates_COMMON)/proguard_options
+ proguard_options_file := $(intermediates.COMMON)/proguard_options
endif # need_compile_res
endif # !custom
LOCAL_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_PROGUARD_FLAGS)
@@ -192,6 +193,8 @@
rs_compatibility_jni_libs :=
+include $(BUILD_SYSTEM)/android_manifest.mk
+
#################################
include $(BUILD_SYSTEM)/java.mk
#################################
@@ -201,8 +204,6 @@
LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
endif
-include $(BUILD_SYSTEM)/android_manifest.mk
-
$(LOCAL_INTERMEDIATE_TARGETS): \
PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
@@ -223,11 +224,6 @@
# At the same time, this will copy the R.java file to a central
# 'R' directory to make it easier to add the files to an IDE.
#
-#TODO: use PRIVATE_SOURCE_INTERMEDIATES_DIR instead of
-# $(intermediates.COMMON)/src
-ifneq ($(package_expected_intermediates_COMMON),$(intermediates.COMMON))
- $(error $(LOCAL_MODULE): internal error: expected intermediates.COMMON "$(package_expected_intermediates_COMMON)" != intermediates.COMMON "$(intermediates.COMMON)")
-endif
$(R_file_stamp): PRIVATE_RESOURCE_PUBLICS_OUTPUT := \
$(intermediates.COMMON)/public_resources.xml
@@ -407,6 +403,16 @@
$(align-package)
###############################
+## Build dpi-specific apks, if this is an apps_only build.
+ifdef TARGET_BUILD_APPS
+ifdef LOCAL_DPI_VARIANTS
+$(foreach d, $(LOCAL_DPI_VARIANTS), \
+ $(eval my_dpi := $(d)) \
+ $(eval include $(BUILD_SYSTEM)/dpi_specific_apk.mk))
+endif
+endif
+
+###############################
## Rule to build the odex file
ifdef LOCAL_DEX_PREOPT
$(built_odex): PRIVATE_DEX_FILE := $(built_dex)
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 284884c..bc6088f 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -114,6 +114,19 @@
ifeq ($(LOCAL_MODULE_CLASS),APPS)
PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
+# Select dpi-specific source
+ifdef LOCAL_DPI_VARIANTS
+my_dpi := $(firstword $(filter $(LOCAL_DPI_VARIANTS),$(PRODUCT_AAPT_PREF_CONFIG) $(PRODUCT_AAPT_PREBUILT_DPI)))
+ifdef my_dpi
+ifdef LOCAL_DPI_FILE_STEM
+my_prebuilt_dpi_file_stem := $(LOCAL_DPI_FILE_STEM)
+else
+my_prebuilt_dpi_file_stem := $(LOCAL_MODULE)_%.apk
+endif
+my_prebuilt_src_file := $(dir $(my_prebuilt_src_file))$(subst %,$(my_dpi),$(my_prebuilt_dpi_file_stem))
+endif # my_dpi
+endif # LOCAL_DPI_VARIANTS
+
rs_compatibility_jni_libs :=
include $(BUILD_SYSTEM)/install_jni_libs.mk
@@ -247,10 +260,26 @@
# while the deps should be in the common dir, so we make a copy in the common dir.
# For nonstatic library, $(common_javalib_jar) is the dependency file,
# while $(common_classes_jar) is used to link.
-common_classes_jar := $(call intermediates-dir-for,JAVA_LIBRARIES,$(LOCAL_MODULE),,COMMON)/classes.jar
-common_javalib_jar := $(dir $(common_classes_jar))javalib.jar
+common_classes_jar := $(intermediates.COMMON)/classes.jar
+common_javalib_jar := $(intermediates.COMMON)/javalib.jar
-$(common_classes_jar) : $(my_prebuilt_src_file) | $(ACP)
+$(common_classes_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
+
+ifneq ($(filter %.aar, $(my_prebuilt_src_file)),)
+# This is an .aar file: an archive of classes.jar and Android resources.
+my_src_jar := $(intermediates.COMMON)/aar/classes.jar
+
+$(my_src_jar) : $(my_prebuilt_src_file)
+ $(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
+ $(hide) unzip -qo -d $(dir $@) $<
+ # Make sure the extracted classes.jar has a new timestamp.
+ $(hide) touch $@
+
+else
+# This is a plain jar file.
+my_src_jar := $(my_prebuilt_src_file)
+endif
+$(common_classes_jar) : $(my_src_jar) | $(ACP)
$(transform-prebuilt-to-target)
$(common_javalib_jar) : $(common_classes_jar) | $(ACP)
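
For prebuilt apps, the new LOCAL_DPI_VARIANTS block above picks the first density from PRODUCT_AAPT_PREF_CONFIG and PRODUCT_AAPT_PREBUILT_DPI that the prebuilt provides, then substitutes it into the file stem (default: <module>_%.apk) alongside the generic source apk. A hedged Python sketch of that selection (select_prebuilt and the sample paths are made up for illustration):

    # Illustrative only; module names and paths are invented.
    import os

    def select_prebuilt(src_file, module, dpi_variants,
                        aapt_pref_config, aapt_prebuilt_dpi,
                        dpi_file_stem=None):
        # First density from the product config that this prebuilt provides.
        dpi = next((d for d in aapt_pref_config + aapt_prebuilt_dpi
                    if d in dpi_variants), None)
        if dpi is None:
            return src_file  # fall back to the generic apk
        stem = dpi_file_stem or module + "_%.apk"
        return os.path.join(os.path.dirname(src_file), stem.replace("%", dpi))

    print(select_prebuilt("vendor/sample/Gallery.apk", "Gallery",
                          ["xhdpi", "xxhdpi"], ["xxhdpi"], ["hdpi"]))
    # vendor/sample/Gallery_xxhdpi.apk
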
diff --git a/core/product.mk b/core/product.mk
index ed906cb..f0fc921 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -70,6 +70,7 @@
PRODUCT_LOCALES \
PRODUCT_AAPT_CONFIG \
PRODUCT_AAPT_PREF_CONFIG \
+ PRODUCT_AAPT_PREBUILT_DPI \
PRODUCT_PACKAGES \
PRODUCT_PACKAGES_DEBUG \
PRODUCT_PACKAGES_ENG \
diff --git a/core/product_config.mk b/core/product_config.mk
index d4ba364..e7fa75e 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -304,19 +304,9 @@
endif
# Add PRODUCT_LOCALES to PRODUCT_AAPT_CONFIG
-PRODUCT_AAPT_CONFIG := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_AAPT_CONFIG))
-PRODUCT_AAPT_CONFIG := $(PRODUCT_LOCALES) $(PRODUCT_AAPT_CONFIG)
+PRODUCT_AAPT_CONFIG := $(strip $(PRODUCT_LOCALES) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_AAPT_CONFIG))
PRODUCT_AAPT_PREF_CONFIG := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_AAPT_PREF_CONFIG))
-
-# Default to medium-density assets.
-# (Can be overridden in the device config, e.g.: PRODUCT_AAPT_CONFIG += hdpi)
-PRODUCT_AAPT_CONFIG := $(strip \
- $(PRODUCT_AAPT_CONFIG) \
- $(if $(filter %dpi,$(PRODUCT_AAPT_CONFIG)),,mdpi))
-PRODUCT_AAPT_PREF_CONFIG := $(strip $(PRODUCT_AAPT_PREF_CONFIG))
-
-# Everyone gets nodpi and anydpi assets which are density-independent.
-PRODUCT_AAPT_CONFIG += nodpi anydpi
+PRODUCT_AAPT_PREBUILT_DPI := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_AAPT_PREBUILT_DPI))
# Keep a copy of the space-separated config
PRODUCT_AAPT_CONFIG_SP := $(PRODUCT_AAPT_CONFIG)
@@ -324,8 +314,6 @@
# Convert spaces to commas.
PRODUCT_AAPT_CONFIG := \
$(subst $(space),$(comma),$(strip $(PRODUCT_AAPT_CONFIG)))
-PRODUCT_AAPT_PREF_CONFIG := \
- $(subst $(space),$(comma),$(strip $(PRODUCT_AAPT_PREF_CONFIG)))
# product-scoped aapt flags
PRODUCT_AAPT_FLAGS :=
diff --git a/core/tasks/apicheck.mk b/core/tasks/apicheck.mk
index fc98f5b..683a075 100644
--- a/core/tasks/apicheck.mk
+++ b/core/tasks/apicheck.mk
@@ -31,6 +31,11 @@
)\
))
+.PHONY: check-public-api
+checkapi : check-public-api
+
+.PHONY: update-api
+
# INTERNAL_PLATFORM_API_FILE is the one build by droiddoc.
# Note that since INTERNAL_PLATFORM_API_FILE is the byproduct of api-stubs module,
# (See frameworks/base/Android.mk)
@@ -39,7 +44,7 @@
# Check that the API we're building hasn't broken the last-released
# SDK version.
$(eval $(call check-api, \
- checkapi-last, \
+ checkpublicapi-last, \
$(SRC_API_DIR)/$(last_released_sdk_version).txt, \
$(INTERNAL_PLATFORM_API_FILE), \
frameworks/base/api/removed.txt, \
@@ -48,14 +53,14 @@
-error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
-error 16 -error 17 -error 18 , \
cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
- checkapi, \
+ check-public-api, \
$(call doc-timestamp-for,api-stubs) \
))
# Check that the API we're building hasn't changed from the not-yet-released
# SDK version.
$(eval $(call check-api, \
- checkapi-current, \
+ checkpublicapi-current, \
frameworks/base/api/current.txt, \
$(INTERNAL_PLATFORM_API_FILE), \
frameworks/base/api/removed.txt, \
@@ -64,21 +69,23 @@
-error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
-error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
-error 25 -error 26 -error 27, \
- sed -e 's/%UPDATE_API%/update-api/g' $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
- checkapi, \
+ cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
+ check-public-api, \
$(call doc-timestamp-for,api-stubs) \
))
-.PHONY: update-api
-update-api: $(INTERNAL_PLATFORM_API_FILE) | $(ACP)
+.PHONY: update-public-api
+update-public-api: $(INTERNAL_PLATFORM_API_FILE) | $(ACP)
@echo Copying current.txt
$(hide) $(ACP) $(INTERNAL_PLATFORM_API_FILE) frameworks/base/api/current.txt
@echo Copying removed.txt
$(hide) $(ACP) $(INTERNAL_PLATFORM_REMOVED_API_FILE) frameworks/base/api/removed.txt
+update-api : update-public-api
#####################Check System API#####################
-.PHONY: checksystemapi
+.PHONY: check-system-api
+checkapi : check-system-api
# Check that the System API we're building hasn't broken the last-released
# SDK version.
@@ -92,7 +99,7 @@
-error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
-error 16 -error 17 -error 18 , \
cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
- checksystemapi, \
+ check-system-api, \
$(call doc-timestamp-for,system-api-stubs) \
))
@@ -108,12 +115,14 @@
-error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
-error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
-error 25 -error 26 -error 27, \
- sed -e 's/%UPDATE_API%/update-system-api/g' $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
- checksystemapi, \
+ cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
+ check-system-api, \
$(call doc-timestamp-for,system-api-stubs) \
))
.PHONY: update-system-api
+update-api : update-system-api
+
update-system-api: $(INTERNAL_PLATFORM_SYSTEM_API_FILE) | $(ACP)
@echo Copying system-current.txt
$(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_API_FILE) frameworks/base/api/system-current.txt
diff --git a/core/tasks/factory_bundle.mk b/core/tasks/factory_bundle.mk
index 054a52a..51531cb 100644
--- a/core/tasks/factory_bundle.mk
+++ b/core/tasks/factory_bundle.mk
@@ -46,7 +46,7 @@
$(eval _fb_m_name := $(word 1,$(_fb_m_tuple))) \
$(eval _fb_dests := $(wordlist 2,999,$(_fb_m_tuple))) \
$(eval _fb_m_built := $(filter $(HOST_OUT)/%, $(ALL_MODULES.$(_fb_m_name).BUILT))) \
- $(if $(_fb_m_built),,$(error no built file in requested_modules for '$(_fb_m_built)'))\
+ $(if $(_fb_m_built),,$(warning no built file in requested_modules for '$(_fb_m_built)'))\
$(foreach _fb_f,$(_fb_dests),$(eval $(call copy-one-file,$(_fb_m_built),$(root_dir)/$(_fb_f))))\
$(addprefix $(root_dir)/,$(_fb_dests)) \
)) \
@@ -82,4 +82,3 @@
endif # TARGET_BUILD_PDK
endif # ONE_SHOT_MAKEFILE
-
diff --git a/core/tasks/factory_ramdisk.mk b/core/tasks/factory_ramdisk.mk
index 00fcdde..d65d931 100644
--- a/core/tasks/factory_ramdisk.mk
+++ b/core/tasks/factory_ramdisk.mk
@@ -44,7 +44,7 @@
$(eval _fulldest := $(TARGET_FACTORY_RAMDISK_OUT)/$(1)) \
$(eval $(call copy-one-file,$(_iofrm_src),$(_fulldest))) \
$(eval INTERNAL_FACTORY_RAMDISK_EXTRA_MODULES_FILES += $(_fulldest)), \
- $(error Error: Cannot find match in "$(2)" for "$(1)") \
+ $(warning Warning: Cannot find built file in "$(2)" for "$(1)") \
)
endef
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index abfa072..d87a032 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -41,7 +41,7 @@
# which is the version that we reveal to the end user.
# Update this value when the platform version changes (rather
# than overriding it somewhere else). Can be an arbitrary string.
- PLATFORM_VERSION := 5.0.2
+ PLATFORM_VERSION := 5.1
endif
ifeq "" "$(PLATFORM_SDK_VERSION)"
@@ -53,7 +53,7 @@
# intermediate builds). During development, this number remains at the
# SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
# the code-name of the new development work.
- PLATFORM_SDK_VERSION := 21
+ PLATFORM_SDK_VERSION := 22
endif
ifeq "" "$(PLATFORM_VERSION_CODENAME)"
diff --git a/envsetup.sh b/envsetup.sh
index a9bd707..d80e95c 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -570,7 +570,8 @@
{
local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
- local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+ local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+ local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
if [ $(echo $arch | wc -w) -gt 1 ]; then
echo "tapas: Error: Multiple build archs supplied: $arch"
@@ -580,6 +581,10 @@
echo "tapas: Error: Multiple build variants supplied: $variant"
return
fi
+ if [ $(echo $density | wc -w) -gt 1 ]; then
+ echo "tapas: Error: Multiple densities supplied: $density"
+ return
+ fi
local product=full
case $arch in
@@ -596,9 +601,13 @@
if [ -z "$apps" ]; then
apps=all
fi
+ if [ -z "$density" ]; then
+ density=alldpi
+ fi
export TARGET_PRODUCT=$product
export TARGET_BUILD_VARIANT=$variant
+ export TARGET_BUILD_DENSITY=$density
export TARGET_BUILD_TYPE=release
export TARGET_BUILD_APPS=$apps
@@ -884,6 +893,85 @@
fi
}
+# coredump_setup - enable core dumps globally for any process
+# that has the core-file-size limit set correctly
+#
+# NOTE: You must also call coredump_enable for a specific process
+# if its core-file-size limit is not set already.
+# NOTE: Core dumps are written to ramdisk; they will not survive a reboot!
+
+function coredump_setup()
+{
+ echo "Getting root...";
+ adb root;
+ adb wait-for-device;
+
+ echo "Remounting root parition read-write...";
+ adb shell mount -w -o remount -t rootfs rootfs;
+ sleep 1;
+ adb wait-for-device;
+ adb shell mkdir -p /cores;
+ adb shell mount -t tmpfs tmpfs /cores;
+ adb shell chmod 0777 /cores;
+
+ echo "Granting SELinux permission to dump in /cores...";
+ adb shell restorecon -R /cores;
+
+ echo "Set core pattern.";
+ adb shell 'echo /cores/core.%p > /proc/sys/kernel/core_pattern';
+
+ echo "Done."
+}
+
+# coredump_enable - enable core dumps for the specified process
+# $1 = PID of process (e.g., $(pid mediaserver))
+#
+# NOTE: coredump_setup must have been called as well for a core
+# dump to actually be generated.
+
+function coredump_enable()
+{
+ local PID=$1;
+ if [ -z "$PID" ]; then
+ printf "Expecting a PID!\n";
+ return;
+ fi;
+ echo "Setting core limit for $PID to infinite...";
+ adb shell prlimit $PID 4 -1 -1
+}
+
+# core - send SIGSEGV and pull the core for the specified process
+# $1 = PID of process (e.g., $(pid mediaserver))
+#
+# NOTE: coredump_setup must be called once per boot for core dumps to be
+# enabled globally.
+
+function core()
+{
+ local PID=$1;
+
+ if [ -z "$PID" ]; then
+ printf "Expecting a PID!\n";
+ return;
+ fi;
+
+ local CORENAME=core.$PID;
+ local COREPATH=/cores/$CORENAME;
+ local SIG=SEGV;
+
+ coredump_enable $1;
+
+ local done=0;
+ while [ $(adb shell "[ -d /proc/$PID ] && echo -n yes") ]; do
+ printf "\tSending SIG%s to %d...\n" $SIG $PID;
+ adb shell kill -$SIG $PID;
+ sleep 1;
+ done;
+
+ adb shell "while [ ! -f $COREPATH ] ; do echo waiting for $COREPATH to be generated; sleep 1; done"
+ echo "Done: core is under $COREPATH on device.";
+}
+
# systemstack - dump the current stack trace of all threads in the system process
# to the usual ANR traces file
function systemstack()
@@ -967,10 +1055,151 @@
fi
}
+function adb_get_product_device() {
+ echo `adb shell getprop ro.product.device | sed s/.$//`
+}
+
+# returns 0 when process is not traced
+function adb_get_traced_by() {
+ echo `adb shell cat /proc/$1/status | grep -e "^TracerPid:" | sed "s/^TracerPid:\t//" | sed s/.$//`
+}
+
+function gdbclient() {
+ # TODO:
+ # 1. Check for ANDROID_SERIAL/multiple devices
+ local PROCESS_NAME="n/a"
+ local PID=$1
+ local PORT=5039
+ if [ -z "$PID" ]; then
+ echo "Usage: gdbclient <pid|processname> [port number]"
+ return -1
+ fi
+ local DEVICE=$(adb_get_product_device)
+
+ if [ -z "$DEVICE" ]; then
+ echo "Error: Unable to get device name. Please check if device is connected and ANDROID_SERIAL is set."
+ return -2
+ fi
+
+ if [ -n "$2" ]; then
+ PORT=$2
+ fi
+
+ local ROOT=$(gettop)
+ if [ -z "$ROOT" ]; then
+ # This is for the situation with downloaded symbols (from the build server);
+ # check whether they are available in the current directory.
+ ROOT=`realpath .`
+ fi
+
+ local OUT_ROOT="$ROOT/out/target/product/$DEVICE"
+ local SYMBOLS_DIR="$OUT_ROOT/symbols"
+
+ if [ ! -d $SYMBOLS_DIR ]; then
+ echo "Error: couldn't find symbols: $SYMBOLS_DIR does not exist or is not a directory."
+ return -3
+ fi
+
+ # let's figure out which executable we are about to debug
+
+ # check if user specified a name -> resolve to pid
+ if [[ ! "$PID" =~ ^[0-9]+$ ]] ; then
+ PROCESS_NAME=$PID
+ PID=$(pid --exact $PROCESS_NAME)
+ if [ -z "$PID" ]; then
+ echo "Error: couldn't resolve pid by process name: $PROCESS_NAME"
+ return -4
+ fi
+ fi
+
+ local EXE=`adb shell readlink /proc/$PID/exe | sed s/.$//`
+ # TODO: print error in case there is no such pid
+ local LOCAL_EXE_PATH=$SYMBOLS_DIR$EXE
+
+ if [ ! -f $LOCAL_EXE_PATH ]; then
+ echo "Error: unable to find symbols for executable $EXE: file $LOCAL_EXE_PATH does not exist"
+ return -5
+ fi
+
+ local USE64BIT=""
+
+ if [[ "$(file $LOCAL_EXE_PATH)" =~ 64-bit ]]; then
+ USE64BIT="64"
+ fi
+
+ local GDB=
+ local GDB64=
+ local CPU_ABI=`adb shell getprop ro.product.cpu.abilist | sed s/.$//`
+ # TODO: we assume these are available via $PATH
+ if [[ $CPU_ABI =~ (^|,)arm64 ]]; then
+ GDB=arm-linux-androideabi-gdb
+ GDB64=aarch64-linux-android-gdb
+ elif [[ $CPU_ABI =~ (^|,)arm ]]; then
+ GDB=arm-linux-androideabi-gdb
+ elif [[ $CPU_ABI =~ (^|,)x86_64 ]]; then
+ GDB=x86_64-linux-androideabi-gdb
+ elif [[ $CPU_ABI =~ (^|,)x86 ]]; then
+ GDB=x86_64-linux-androideabi-gdb
+ elif [[ $CPU_ABI =~ (^|,)mips64 ]]; then
+ GDB=mipsel-linux-android-gdb
+ GDB64=mips64el-linux-android-gdb
+ elif [[ $CPU_ABI =~ (^|,)mips ]]; then
+ GDB=mipsel-linux-android-gdb
+ else
+ echo "Error: unrecognized cpu.abilist: $CPU_ABI"
+ return -6
+ fi
+
+ # TODO: check if tracing process is gdbserver and not some random strace...
+ if [ $(adb_get_traced_by $PID) -eq 0 ]; then
+ # start gdbserver
+ echo "Starting gdbserver..."
+ # TODO: check if adb is already listening $PORT
+ # to avoid unnecessary calls
+ echo ". adb forward for port=$PORT..."
+ adb forward tcp:$PORT tcp:$PORT
+ echo ". starting gdbserver to attach to pid=$PID..."
+ adb shell gdbserver$USE64BIT :$PORT --attach $PID &
+ echo ". give it couple of seconds to start..."
+ sleep 2
+ echo ". done"
+ else
+ echo "It looks like gdbserver is already attached to $PID (process is traced), trying to connect to it using local port=$PORT"
+ fi
+
+ local OUT_SO_SYMBOLS=$SYMBOLS_DIR/system/lib$USE64BIT
+ local OUT_VENDOR_SO_SYMBOLS=$SYMBOLS_DIR/vendor/lib$USE64BIT
+ local ART_CMD=""
+
+ echo >|"$OUT_ROOT/gdbclient.cmds" "set solib-absolute-prefix $SYMBOLS_DIR"
+ echo >>"$OUT_ROOT/gdbclient.cmds" "set solib-search-path $OUT_SO_SYMBOLS:$OUT_SO_SYMBOLS/hw:$OUT_SO_SYMBOLS/ssl/engines:$OUT_SO_SYMBOLS/drm:$OUT_SO_SYMBOLS/egl:$OUT_SO_SYMBOLS/soundfx:$OUT_VENDOR_SO_SYMBOLS:$OUT_VENDOR_SO_SYMBOLS/hw:$OUT_VENDOR_SO_SYMBOLS/egl"
+ local DALVIK_GDB_SCRIPT=$ROOT/development/scripts/gdb/dalvik.gdb
+ if [ -f $DALVIK_GDB_SCRIPT ]; then
+ echo >>"$OUT_ROOT/gdbclient.cmds" "source $DALVIK_GDB_SCRIPT"
+ ART_CMD="art-on"
+ else
+ echo "Warning: couldn't find $DALVIK_GDB_SCRIPT - ART debugging options will not be available"
+ fi
+ echo >>"$OUT_ROOT/gdbclient.cmds" "target remote :$PORT"
+ if [[ $EXE =~ (^|/)(app_process|dalvikvm)(|32|64)$ ]]; then
+ echo >> "$OUT_ROOT/gdbclient.cmds" $ART_CMD
+ fi
+
+ echo >>"$OUT_ROOT/gdbclient.cmds" ""
+
+ local WHICH_GDB=$GDB
+
+ if [ -n "$USE64BIT" -a -n "$GDB64" ]; then
+ WHICH_GDB=$GDB64
+ fi
+
+ gdbwrapper $WHICH_GDB "$OUT_ROOT/gdbclient.cmds" "$LOCAL_EXE_PATH"
+}
+
# gdbclient now determines whether the user wants to debug a 32-bit or 64-bit
# executable, sets up the appropriate gdbserver, then invokes the proper host
# gdb.
-function gdbclient()
+function gdbclient_old()
{
local OUT_ROOT=$(get_abs_build_var PRODUCT_OUT)
local OUT_SYMBOLS=$(get_abs_build_var TARGET_OUT_UNSTRIPPED)
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 818f857..8afd5a8 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -76,7 +76,7 @@
USE_OPENGL_RENDERER := true
TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 845427200
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 943718400
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/board/generic_mips/BoardConfig.mk b/target/board/generic_mips/BoardConfig.mk
index a319ad7..7cef968 100644
--- a/target/board/generic_mips/BoardConfig.mk
+++ b/target/board/generic_mips/BoardConfig.mk
@@ -50,7 +50,7 @@
USE_OPENGL_RENDERER := true
TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 786432000
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 838860800
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index c4fd958..295ee2b 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -41,7 +41,7 @@
USE_OPENGL_RENDERER := true
TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 943718400 # 900MB
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1073741824 # 1GB
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/product/base.mk b/target/product/base.mk
index 28877c5..0d052b5 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -115,6 +115,9 @@
wm
+PRODUCT_COPY_FILES := $(call add-to-product-copy-files-if-exists,\
+ frameworks/base/preloaded-classes:system/etc/preloaded-classes)
+
# Note: it is acceptable to not have a compiled-classes file. In that case, all boot classpath
# classes will be compiled.
PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index 9b1a826..caace76 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -48,7 +48,7 @@
PRODUCT_LOCALES := en_US
# Include drawables for all densities
-PRODUCT_AAPT_CONFIG := normal hdpi xhdpi xxhdpi
+PRODUCT_AAPT_CONFIG := normal
# Get some sounds
$(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk)
diff --git a/target/product/generic_no_telephony.mk b/target/product/generic_no_telephony.mk
index 0713db1..7af62ce 100644
--- a/target/product/generic_no_telephony.mk
+++ b/target/product/generic_no_telephony.mk
@@ -63,6 +63,7 @@
$(call inherit-product-if-exists, external/google-fonts/carrois-gothic-sc/fonts.mk)
$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
$(call inherit-product-if-exists, external/google-fonts/cutive-mono/fonts.mk)
+$(call inherit-product-if-exists, external/lohit-fonts/fonts.mk)
$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
$(call inherit-product-if-exists, external/naver-fonts/fonts.mk)
$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
diff --git a/target/product/languages_full.mk b/target/product/languages_full.mk
index 030777e..6d77cde 100644
--- a/target/product/languages_full.mk
+++ b/target/product/languages_full.mk
@@ -21,4 +21,4 @@
# These are all the locales that have translations and are displayable
# by TextView in this branch.
-PRODUCT_LOCALES := en_AU en_US en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR rm_CH sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET hi_IN en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_IN ml_IN is_IS mk_MK ky_KG eu_ES gl_ES bn_BD ta_IN kn_IN te_IN uz_UZ ur_PK kk_KZ
+PRODUCT_LOCALES := en_US en_AU en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR rm_CH sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET hi_IN en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_IN ml_IN is_IS mk_MK ky_KG eu_ES gl_ES bn_BD ta_IN kn_IN te_IN uz_UZ ur_PK kk_KZ
diff --git a/target/product/sdk_base.mk b/target/product/sdk_base.mk
index 8610169..451c0b7 100644
--- a/target/product/sdk_base.mk
+++ b/target/product/sdk_base.mk
@@ -95,14 +95,10 @@
-include external/svox/pico/lang/PicoLangFrFrInSystem.mk
-include external/svox/pico/lang/PicoLangItItInSystem.mk
-# locale + densities. en_US is both first and in alphabetical order to
+# locale. en_US is both first and in alphabetical order to
# ensure this is the default locale.
PRODUCT_LOCALES := \
en_US \
- ldpi \
- hdpi \
- mdpi \
- xhdpi \
ar_EG \
ar_IL \
bg_BG \
diff --git a/target/product/security/verity.pk8 b/target/product/security/verity.pk8
new file mode 100644
index 0000000..bebf216
--- /dev/null
+++ b/target/product/security/verity.pk8
Binary files differ
diff --git a/target/product/security/verity.x509.pem b/target/product/security/verity.x509.pem
new file mode 100644
index 0000000..86399c3
--- /dev/null
+++ b/target/product/security/verity.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
+6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
+fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
+T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
+AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
+jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
+HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
+oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
+NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
+JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
+dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
+UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
+yttuAJAEAymk1mipd9+zp38=
+-----END CERTIFICATE-----
diff --git a/target/product/security/verity_key b/target/product/security/verity_key
index 8db965f..31982d9 100644
--- a/target/product/security/verity_key
+++ b/target/product/security/verity_key
Binary files differ
diff --git a/target/product/security/verity_private_dev_key b/target/product/security/verity_private_dev_key
deleted file mode 100644
index 92528e9..0000000
--- a/target/product/security/verity_private_dev_key
+++ /dev/null
@@ -1,28 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDQxdVrH2RB1eg5
-17/gBmLzW1Ds10RG6ctNZMhxppMOLnEZViKGv1VNRhxqK/JKTv2UujgZ94SJcDub
-G+DwAwaGZKQqDYUa0VU2cng8TYPcnYGPdJ7Usckp6tdg64vns7e+VVf0dOyEovR+
-JyeYUz05OhUMYP9xJIhpA2XnXe5Ekb9iTFSYo9uBpoXDD4IY7aOqUxSbv9wMtyIp
-dl+oTm0+kqRRi4KoxGHV0CzDseEUuWG/Kp/7aVF9Sg45NcC6KYvrGysUKA+Bt09O
-feDn/HRpT9SfRElJa5DRms33UBUtnom15F4yd4vvFgubB0nkPOUuwfZhTFfgeuY4
-H2bHkjKbAgMBAAECggEAMpFYqkPGQvQO9cO+ZALoAM4Dgfp6PTrv1WUt7+lLAUpa
-dqqYXk8F2Fu9EjJm03ziix237QI5Bhk7Nsy/5SK2d+L0qILx1JcTrsZ3PRQBdnRo
-J1k2B4qwkQii9oTXNF4hiWaekUWo7E+ULOJLAuhWkf/xjTgJZ1xT9iuuiSYFSnIa
-9ABNH0vCaKEkW/4ri6fdtXmO26C/ltJlnozl86x07PIFh4uBas7/40E8ykFP00CS
-zdhMh+2DGyCb1Q0eJ1IfGILNatkLNEd2BHgQ7qNBkN9yShZfhvIPblr5gSUlZplX
-diV20ZGLAfByKWgZZWKkwl9KzaisL/J/4dr2UlSVEQKBgQDxAYTsgoTkkP0TKzr3
-i3ljT8OuVOj6TwZVBJYe2MIJ3veivS3gWB53FpsKthbib7y8ifIakn15mQkNCK5R
-7H7F5lvZCNnB6shY5Dz7nLJxKLALcAg+d12l3gTbFQeFDs0iQQJF7P8hs/GPF7kY
-Layb7EF0uzYjyHJCKtFdaZaeZwKBgQDdwvCb7NJVeGTcE97etL+8acu9y4GlqKEF
-o0Vkw8TjNKj/KuDkbkAk9hXxU1ZCmDU3y6r8CVHYl0Sqh08plEhkYB/j3sFy81zY
-3xu/rLFysBwjeJHHlPjRTYkdKr9pABmm8NIEShvu9u8i+mpOhjbX72HxZL+i4Fou
-gz58wEdBrQKBgG8CfyKdn+7UJe3tbLTXRquK8xxauhGJ0uXYPfmpZ/8596C7OOVs
-UWQTQoj1hKb6RtolRCIfNbKL3hJl3D2aDG7Fg6r9m6fpqCzhvIE9FShwUF6EVRfI
-zZb4JA5xqkwMnEpZ3V0uI/p3Mx3xFG3ho+8SLLhC/1YOHysBI/y+BQWjAoGAYiqQ
-PkXYWhOAeleleeqDUdF3al3y1zVNimRbLJ7owjcmdEYz5YrUhEgXMIvWjIY6UKes
-2gL6IynbMK3TIjHM1fojQ8jw04TdXfdtnizBJGbHHgCab8IHXwe2oZ2xu7ZapKbI
-ITP5J5BSDabSdk49attB/Qy/NEeiRCK+/5RSNsUCgYAg6vX9VqMEkhPHeoFfdLGD
-EQPPN6QLrQ4Zif0GKxH96znNSv0rXdNp9t0kyapdgzMuCwIEuOkCSiKgmfjTWnYO
-qh5HMUuD2VbfWwI9jVujQMRmqiaFF7VxxA1bP5j1hJlI6cn1Fjlpi+NsNZN4nm3Q
-92SEwX2vDgjrU0NAtFFL1Q==
------END PRIVATE KEY-----
diff --git a/target/product/verity.mk b/target/product/verity.mk
index 4a1ca5e..0361b64 100644
--- a/target/product/verity.mk
+++ b/target/product/verity.mk
@@ -17,7 +17,11 @@
# Provides dependencies necessary for verified boot
PRODUCT_SUPPORTS_VERITY := true
-PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity_private_dev_key
+
+# The dev key is used to sign boot and recovery images, and the verity
+# metadata table. Actual product deliverables will be re-signed by hand.
+# We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
+PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity
PRODUCT_PACKAGES += \
verity_key
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index ed6bd87..a80b2db 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -16,6 +16,7 @@
echo "ro.build.user=$USER"
echo "ro.build.host=`hostname`"
echo "ro.build.tags=$BUILD_VERSION_TAGS"
+echo "ro.build.flavor=$TARGET_BUILD_FLAVOR"
echo "ro.product.model=$PRODUCT_MODEL"
echo "ro.product.brand=$PRODUCT_BRAND"
echo "ro.product.name=$PRODUCT_NAME"
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 216486c..8b179d5 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -16,6 +16,7 @@
from collections import deque, OrderedDict
from hashlib import sha1
+import heapq
import itertools
import multiprocessing
import os
@@ -142,9 +143,16 @@
self.goes_before = {}
self.goes_after = {}
+ self.stash_before = []
+ self.use_stash = []
+
self.id = len(by_id)
by_id.append(self)
+ def NetStashChange(self):
+ return (sum(sr.size() for (_, sr) in self.stash_before) -
+ sum(sr.size() for (_, sr) in self.use_stash))
+
def __str__(self):
return (str(self.id) + ": <" + str(self.src_ranges) + " " + self.style +
" to " + str(self.tgt_ranges) + ">")
@@ -182,11 +190,14 @@
# original image.
class BlockImageDiff(object):
- def __init__(self, tgt, src=None, threads=None):
+ def __init__(self, tgt, src=None, threads=None, version=2):
if threads is None:
threads = multiprocessing.cpu_count() // 2
if threads == 0: threads = 1
self.threads = threads
+ self.version = version
+
+ assert version in (1, 2)
self.tgt = tgt
if src is None:
@@ -221,7 +232,12 @@
self.FindVertexSequence()
# Fix up the ordering dependencies that the sequence didn't
# satisfy.
- self.RemoveBackwardEdges()
+ if self.version == 1:
+ self.RemoveBackwardEdges()
+ else:
+ self.ReverseBackwardEdges()
+ self.ImproveVertexSequence()
+
# Double-check our work.
self.AssertSequenceGood()
@@ -231,18 +247,87 @@
def WriteTransfers(self, prefix):
out = []
- out.append("1\n") # format version number
total = 0
performs_read = False
+ stashes = {}
+ stashed_blocks = 0
+ max_stashed_blocks = 0
+
+ free_stash_ids = []
+ next_stash_id = 0
+
for xf in self.transfers:
- # zero [rangeset]
- # new [rangeset]
- # bsdiff patchstart patchlen [src rangeset] [tgt rangeset]
- # imgdiff patchstart patchlen [src rangeset] [tgt rangeset]
- # move [src rangeset] [tgt rangeset]
- # erase [rangeset]
+ if self.version < 2:
+ assert not xf.stash_before
+ assert not xf.use_stash
+
+ for s, sr in xf.stash_before:
+ assert s not in stashes
+ if free_stash_ids:
+ sid = heapq.heappop(free_stash_ids)
+ else:
+ sid = next_stash_id
+ next_stash_id += 1
+ stashes[s] = sid
+ stashed_blocks += sr.size()
+ out.append("stash %d %s\n" % (sid, sr.to_string_raw()))
+
+ if stashed_blocks > max_stashed_blocks:
+ max_stashed_blocks = stashed_blocks
+
+ if self.version == 1:
+ src_string = xf.src_ranges.to_string_raw()
+ elif self.version == 2:
+
+ # <# blocks> <src ranges>
+ # OR
+ # <# blocks> <src ranges> <src locs> <stash refs...>
+ # OR
+ # <# blocks> - <stash refs...>
+
+ size = xf.src_ranges.size()
+ src_string = [str(size)]
+
+ unstashed_src_ranges = xf.src_ranges
+ mapped_stashes = []
+ for s, sr in xf.use_stash:
+ sid = stashes.pop(s)
+ stashed_blocks -= sr.size()
+ unstashed_src_ranges = unstashed_src_ranges.subtract(sr)
+ sr = xf.src_ranges.map_within(sr)
+ mapped_stashes.append(sr)
+ src_string.append("%d:%s" % (sid, sr.to_string_raw()))
+ heapq.heappush(free_stash_ids, sid)
+
+ if unstashed_src_ranges:
+ src_string.insert(1, unstashed_src_ranges.to_string_raw())
+ if xf.use_stash:
+ mapped_unstashed = xf.src_ranges.map_within(unstashed_src_ranges)
+ src_string.insert(2, mapped_unstashed.to_string_raw())
+ mapped_stashes.append(mapped_unstashed)
+ self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
+ else:
+ src_string.insert(1, "-")
+ self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
+
+ src_string = " ".join(src_string)
+
+ # both versions:
+ # zero <rangeset>
+ # new <rangeset>
+ # erase <rangeset>
+ #
+ # version 1:
+ # bsdiff patchstart patchlen <src rangeset> <tgt rangeset>
+ # imgdiff patchstart patchlen <src rangeset> <tgt rangeset>
+ # move <src rangeset> <tgt rangeset>
+ #
+ # version 2:
+ # bsdiff patchstart patchlen <tgt rangeset> <src_string>
+ # imgdiff patchstart patchlen <tgt rangeset> <src_string>
+ # move <tgt rangeset> <src_string>
tgt_size = xf.tgt_ranges.size()
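
In the version-2 path above, stash slot ids are recycled through the free_stash_ids min-heap: a freed id is reused (smallest first) before next_stash_id grows, which keeps the slot count and max_stashed_blocks down. A stripped-down, stand-alone sketch of that bookkeeping (StashSlots is illustrative, not part of blockimgdiff.py):

    # Illustrative only: reuse released slot ids (smallest first) before
    # allocating new ones, as WriteTransfers does with free_stash_ids.
    import heapq

    class StashSlots(object):
        def __init__(self):
            self.free_ids = []   # min-heap of released slot ids
            self.next_id = 0     # next never-used id
            self.live = {}       # stash key -> slot id

        def allocate(self, key):
            if self.free_ids:
                sid = heapq.heappop(self.free_ids)
            else:
                sid = self.next_id
                self.next_id += 1
            self.live[key] = sid
            return sid

        def release(self, key):
            sid = self.live.pop(key)
            heapq.heappush(self.free_ids, sid)
            return sid

    slots = StashSlots()
    a, b = slots.allocate("s0"), slots.allocate("s1")   # 0, 1
    slots.release("s0")
    c = slots.allocate("s2")                            # reuses 0
    print("%d %d %d %d" % (a, b, c, slots.next_id))     # 0 1 0 2
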
@@ -255,17 +340,27 @@
assert xf.tgt_ranges
assert xf.src_ranges.size() == tgt_size
if xf.src_ranges != xf.tgt_ranges:
- out.append("%s %s %s\n" % (
- xf.style,
- xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ if self.version == 1:
+ out.append("%s %s %s\n" % (
+ xf.style,
+ xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ elif self.version == 2:
+ out.append("%s %s %s\n" % (
+ xf.style,
+ xf.tgt_ranges.to_string_raw(), src_string))
total += tgt_size
elif xf.style in ("bsdiff", "imgdiff"):
performs_read = True
assert xf.tgt_ranges
assert xf.src_ranges
- out.append("%s %d %d %s %s\n" % (
- xf.style, xf.patch_start, xf.patch_len,
- xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ if self.version == 1:
+ out.append("%s %d %d %s %s\n" % (
+ xf.style, xf.patch_start, xf.patch_len,
+ xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ elif self.version == 2:
+ out.append("%s %d %d %s %s\n" % (
+ xf.style, xf.patch_start, xf.patch_len,
+ xf.tgt_ranges.to_string_raw(), src_string))
total += tgt_size
elif xf.style == "zero":
assert xf.tgt_ranges
@@ -276,7 +371,10 @@
else:
raise ValueError, "unknown transfer style '%s'\n" % (xf.style,)
- out.insert(1, str(total) + "\n")
+
+ # sanity check: abort if we're going to need more than 512 MB of
+ # stash space
+ assert max_stashed_blocks * self.tgt.blocksize < (512 << 20)
all_tgt = RangeSet(data=(0, self.tgt.total_blocks))
if performs_read:
@@ -289,12 +387,24 @@
else:
# if nothing is read (ie, this is a full OTA), then we can start
# by erasing the entire partition.
- out.insert(2, "erase %s\n" % (all_tgt.to_string_raw(),))
+ out.insert(0, "erase %s\n" % (all_tgt.to_string_raw(),))
+
+ out.insert(0, "%d\n" % (self.version,)) # format version number
+ out.insert(1, str(total) + "\n")
+ if self.version >= 2:
+ # version 2 only: after the total block count, we give the number
+ # of stash slots needed, and the maximum size needed (in blocks)
+ out.insert(2, str(next_stash_id) + "\n")
+ out.insert(3, str(max_stashed_blocks) + "\n")
with open(prefix + ".transfer.list", "wb") as f:
for i in out:
f.write(i)
+ if self.version >= 2:
+ print("max stashed blocks: %d (%d bytes)\n" % (
+ max_stashed_blocks, max_stashed_blocks * self.tgt.blocksize))
+
def ComputePatches(self, prefix):
print("Reticulating splines...")
diff_q = []
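The header that WriteTransfers now emits is: the format version, the total number of target blocks written, and, for version 2 only, the number of stash slots followed by the maximum number of simultaneously stashed blocks; the first command (for a full OTA, the erase) comes after that. A small, hypothetical reader for that header, assuming the layout above:

    def read_transfer_list_header(lines):
        # Returns (version, total, stash_slots, max_stashed_blocks);
        # the last two are None for version-1 transfer lists.
        version = int(lines[0])
        total = int(lines[1])
        if version >= 2:
            return version, total, int(lines[2]), int(lines[3])
        return version, total, None, None

    sample = ["2\n", "4096\n", "3\n", "512\n", "erase 0-1023\n"]
    print(read_transfer_list_header(sample))   # (2, 4096, 3, 512)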
@@ -409,7 +519,13 @@
# Imagine processing the transfers in order.
for xf in self.transfers:
# Check that the input blocks for this transfer haven't yet been touched.
- assert not touched.overlaps(xf.src_ranges)
+
+ x = xf.src_ranges
+ if self.version >= 2:
+ for _, sr in xf.use_stash:
+ x = x.subtract(sr)
+
+ assert not touched.overlaps(x)
# Check that the output blocks for this transfer haven't yet been touched.
assert not touched.overlaps(xf.tgt_ranges)
# Touch all the blocks written by this transfer.
@@ -418,6 +534,47 @@
# Check that we've written every target block.
assert touched == self.tgt.care_map
+ def ImproveVertexSequence(self):
+ print("Improving vertex order...")
+
+ # At this point our digraph is acyclic; we reversed any edges that
+ # were backwards in the heuristically-generated sequence. The
+ # previously-generated order is still acceptable, but we hope to
+ # find a better order that needs less memory for stashed data.
+ # Now we do a topological sort to generate a new vertex order,
+ # using a greedy algorithm to choose which vertex goes next
+ # whenever we have a choice.
+
+ # Make a copy of the edge set; this copy will get destroyed by the
+ # algorithm.
+ for xf in self.transfers:
+ xf.incoming = xf.goes_after.copy()
+ xf.outgoing = xf.goes_before.copy()
+
+ L = [] # the new vertex order
+
+ # S is the set of sources in the remaining graph; we always choose
+ # the one that leaves the least amount of stashed data after it's
+ # executed.
+ S = [(u.NetStashChange(), u.order, u) for u in self.transfers
+ if not u.incoming]
+ heapq.heapify(S)
+
+ while S:
+ _, _, xf = heapq.heappop(S)
+ L.append(xf)
+ for u in xf.outgoing:
+ del u.incoming[xf]
+ if not u.incoming:
+ heapq.heappush(S, (u.NetStashChange(), u.order, u))
+
+ # if this fails then our graph had a cycle.
+ assert len(L) == len(self.transfers)
+
+ self.transfers = L
+ for i, xf in enumerate(L):
+ xf.order = i
+
def RemoveBackwardEdges(self):
print("Removing backward edges...")
in_order = 0
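The ordering pass in ImproveVertexSequence above is Kahn's topological sort with a heap deciding which ready vertex to schedule next. A self-contained toy version on plain dicts (no Transfer objects; weight() stands in for NetStashChange()):

    import heapq

    def greedy_topo_order(edges, weight):
        # edges: {node: set of successors}; smaller weight(node) is
        # scheduled first, mirroring the least-stash-left-behind choice.
        nodes = set(edges) | {v for vs in edges.values() for v in vs}
        incoming = {n: set() for n in nodes}
        for u, vs in edges.items():
            for v in vs:
                incoming[v].add(u)

        ready = [(weight(n), n) for n in nodes if not incoming[n]]
        heapq.heapify(ready)
        order = []
        while ready:
            _, u = heapq.heappop(ready)
            order.append(u)
            for v in edges.get(u, ()):
                incoming[v].discard(u)
                if not incoming[v]:
                    heapq.heappush(ready, (weight(v), v))
        assert len(order) == len(nodes), "graph had a cycle"
        return order

    print(greedy_topo_order({"a": {"b", "c"}, "b": {"c"}}, weight=len))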
@@ -425,19 +582,17 @@
lost_source = 0
for xf in self.transfers:
- io = 0
- ooo = 0
lost = 0
size = xf.src_ranges.size()
for u in xf.goes_before:
# xf should go before u
if xf.order < u.order:
# it does, hurray!
- io += 1
+ in_order += 1
else:
# it doesn't, boo. trim the blocks that u writes from xf's
# source, so that xf can go after u.
- ooo += 1
+ out_of_order += 1
assert xf.src_ranges.overlaps(u.tgt_ranges)
xf.src_ranges = xf.src_ranges.subtract(u.tgt_ranges)
xf.intact = False
@@ -448,8 +603,6 @@
lost = size - xf.src_ranges.size()
lost_source += lost
- in_order += io
- out_of_order += ooo
print((" %d/%d dependencies (%.2f%%) were violated; "
"%d source blocks removed.") %
@@ -458,6 +611,48 @@
if (in_order + out_of_order) else 0.0,
lost_source))
+ def ReverseBackwardEdges(self):
+ print("Reversing backward edges...")
+ in_order = 0
+ out_of_order = 0
+ stashes = 0
+ stash_size = 0
+
+ for xf in self.transfers:
+ lost = 0
+ size = xf.src_ranges.size()
+ for u in xf.goes_before.copy():
+ # xf should go before u
+ if xf.order < u.order:
+ # it does, hurray!
+ in_order += 1
+ else:
+ # it doesn't, boo. modify u to stash the blocks that it
+ # writes that xf wants to read, and then require u to go
+ # before xf.
+ out_of_order += 1
+
+ overlap = xf.src_ranges.intersect(u.tgt_ranges)
+ assert overlap
+
+ u.stash_before.append((stashes, overlap))
+ xf.use_stash.append((stashes, overlap))
+ stashes += 1
+ stash_size += overlap.size()
+
+ # reverse the edge direction; now xf must go after u
+ del xf.goes_before[u]
+ del u.goes_after[xf]
+ xf.goes_after[u] = None # value doesn't matter
+ u.goes_before[xf] = None
+
+ print((" %d/%d dependencies (%.2f%%) were violated; "
+ "%d source blocks stashed.") %
+ (out_of_order, in_order + out_of_order,
+ (out_of_order * 100.0 / (in_order + out_of_order))
+ if (in_order + out_of_order) else 0.0,
+ stash_size))
+
def FindVertexSequence(self):
print("Finding vertex sequence...")
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 5d87864..302aa0c 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -160,7 +160,7 @@
# get properties
image_size = prop_dict["partition_size"]
block_dev = prop_dict["verity_block_device"]
- signer_key = prop_dict["verity_key"]
+ signer_key = prop_dict["verity_key"] + ".pk8"
signer_path = prop_dict["verity_signer_cmd"]
# make a tempdir
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 815c76c..8941f89 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -347,7 +347,7 @@
if info_dict.get("verity_key", None):
path = "/" + os.path.basename(sourcedir).lower()
- cmd = ["boot_signer", path, img.name, info_dict["verity_key"], img.name]
+ cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8", info_dict["verity_key"] + ".x509.pem", img.name]
p = Run(cmd, stdout=subprocess.PIPE)
p.communicate()
assert p.returncode == 0, "boot_signer of %s image failed" % path
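With this change the verity_key value is treated as a key-pair prefix rather than a single file; for an illustrative prefix the command list above expands roughly as follows (paths are examples only):

    # Illustration of how the cmd list above expands; paths are made up.
    verity_key = "build/target/product/security/verity"
    path, img_name = "/boot", "/tmp/boot.img"
    cmd = ["boot_signer", path, img_name,
           verity_key + ".pk8", verity_key + ".x509.pem", img_name]
    print(" ".join(cmd))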
@@ -1030,7 +1030,14 @@
self.partition = partition
self.check_first_block = check_first_block
- b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads)
+ version = 1
+ if OPTIONS.info_dict:
+ version = max(
+ int(i) for i in
+ OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+
+ b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
+ version=version)
tmpdir = tempfile.mkdtemp()
OPTIONS.tempfiles.append(tmpdir)
self.path = os.path.join(tmpdir, partition)
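The version chosen here is the highest entry in the target's comma-separated blockimgdiff_versions value, defaulting to 1 when the dict or key is absent. A standalone equivalent (pick_blockimgdiff_version is a hypothetical helper, not part of common.py):

    def pick_blockimgdiff_version(info_dict):
        if not info_dict:
            return 1
        return max(int(i) for i in
                   info_dict.get("blockimgdiff_versions", "1").split(","))

    print(pick_blockimgdiff_version(None))                              # 1
    print(pick_blockimgdiff_version({"blockimgdiff_versions": "1,2"}))  # 2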
@@ -1041,9 +1048,16 @@
def WriteScript(self, script, output_zip, progress=None):
if not self.src:
# write the output unconditionally
- if progress: script.ShowProgress(progress, 0)
- self._WriteUpdate(script, output_zip)
+ script.Print("Patching %s image unconditionally..." % (self.partition,))
+ else:
+ script.Print("Patching %s image after verification." % (self.partition,))
+ if progress: script.ShowProgress(progress, 0)
+ self._WriteUpdate(script, output_zip)
+
+ def WriteVerifyScript(self, script):
+ if not self.src:
+ script.Print("Image %s will be patched unconditionally." % (self.partition,))
else:
if self.check_first_block:
self._CheckFirstBlock(script)
@@ -1051,9 +1065,7 @@
script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' %
(self.device, self.src.care_map.to_string_raw(),
self.src.TotalSha1()))
- script.Print("Patching %s image..." % (self.partition,))
- if progress: script.ShowProgress(progress, 0)
- self._WriteUpdate(script, output_zip)
+ script.Print("Verified %s image..." % (self.partition,))
script.AppendExtra(('else\n'
' (range_sha1("%s", "%s") == "%s") ||\n'
' abort("%s partition has unexpected contents");\n'
diff --git a/tools/releasetools/ota_from_target_files b/tools/releasetools/ota_from_target_files
index 755e5c2..945f11a 100755
--- a/tools/releasetools/ota_from_target_files
+++ b/tools/releasetools/ota_from_target_files
@@ -558,6 +558,7 @@
system_items = ItemSet("system", "META/filesystem_config.txt")
script.ShowProgress(system_progress, 0)
+
if block_based:
# Full OTA is done as an "incremental" against an empty source
# image. This has the effect of writing new data from the package
@@ -845,6 +846,11 @@
else
""" % bcb_dev)
+ # Verify the existing partitions.
+ system_diff.WriteVerifyScript(script)
+ if vendor_diff:
+ vendor_diff.WriteVerifyScript(script)
+
script.Comment("---- start making changes here ----")
device_specific.IncrementalOTA_InstallBegin()
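The effect of the new hook is that every block-based partition is verified before any of them is modified. A toy, runnable illustration of that call ordering with stand-in objects (FakeDiff is not the real common.BlockDifference; it only records the calls, and the progress values are placeholders):

    class FakeDiff(object):
        def __init__(self, partition):
            self.partition = partition
        def WriteVerifyScript(self, script):
            script.append("verify %s" % self.partition)
        def WriteScript(self, script, output_zip, progress=None):
            script.append("patch %s (progress=%s)" % (self.partition, progress))

    script = []
    system_diff, vendor_diff = FakeDiff("system"), FakeDiff("vendor")

    # Verify everything first (the new step), then start making changes.
    system_diff.WriteVerifyScript(script)
    if vendor_diff:
        vendor_diff.WriteVerifyScript(script)
    system_diff.WriteScript(script, None, progress=0.9)
    if vendor_diff:
        vendor_diff.WriteScript(script, None, progress=0.1)
    print("\n".join(script))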
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index 8a85d2d..7279c60 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -24,7 +24,9 @@
lots of runs."""
def __init__(self, data=None):
- if data:
+ if isinstance(data, str):
+ self._parse_internal(data)
+ elif data:
self.data = tuple(self._remove_pairs(data))
else:
self.data = ()
@@ -46,6 +48,9 @@
else:
return self.to_string()
+ def __repr__(self):
+ return '<RangeSet("' + self.to_string() + '")>'
+
@classmethod
def parse(cls, text):
"""Parse a text string consisting of a space-separated list of
@@ -59,7 +64,9 @@
"15-20 30 10-14" is not, even though they represent the same set
of blocks (and the two RangeSets will compare equal with ==).
"""
+ return cls(text)
+ def _parse_internal(self, text):
data = []
last = -1
monotonic = True
@@ -81,9 +88,8 @@
else:
monotonic = True
data.sort()
- r = RangeSet(cls._remove_pairs(data))
- r.monotonic = monotonic
- return r
+ self.data = tuple(self._remove_pairs(data))
+ self.monotonic = monotonic
@staticmethod
def _remove_pairs(source):
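Since the constructor now accepts the textual form directly, RangeSet("10-19 30-34") and RangeSet.parse("10-19 30-34") are interchangeable; both end up as a sorted tuple of boundary points where each consecutive pair is a half-open [start, end) block range. A simplified stand-in for the parsing step (it skips the monotonic bookkeeping and the _remove_pairs merging of the real class):

    def parse_ranges(text):
        data = []
        for piece in text.split():
            if "-" in piece:
                start, end = (int(x) for x in piece.split("-"))
            else:
                start = end = int(piece)
            data.append(start)
            data.append(end + 1)
        data.sort()
        return tuple(data)

    print(parse_ranges("10-19 30-34"))     # (10, 20, 30, 35)
    print(parse_ranges("15-20 30 10-14"))  # (10, 15, 15, 21, 30, 31), before pair removal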
@@ -113,7 +119,13 @@
def union(self, other):
"""Return a new RangeSet representing the union of this RangeSet
- with the argument."""
+ with the argument.
+
+ >>> RangeSet("10-19 30-34").union(RangeSet("18-29"))
+ <RangeSet("10-34")>
+ >>> RangeSet("10-19 30-34").union(RangeSet("22 32"))
+ <RangeSet("10-19 22 30-34")>
+ """
out = []
z = 0
for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
@@ -125,7 +137,13 @@
def intersect(self, other):
"""Return a new RangeSet representing the intersection of this
- RangeSet with the argument."""
+ RangeSet with the argument.
+
+ >>> RangeSet("10-19 30-34").intersect(RangeSet("18-32"))
+ <RangeSet("18-19 30-32")>
+ >>> RangeSet("10-19 30-34").intersect(RangeSet("22-28"))
+ <RangeSet("")>
+ """
out = []
z = 0
for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
@@ -137,7 +155,13 @@
def subtract(self, other):
"""Return a new RangeSet representing subtracting the argument
- from this RangeSet."""
+ from this RangeSet.
+
+ >>> RangeSet("10-19 30-34").subtract(RangeSet("18-32"))
+ <RangeSet("10-17 33-34")>
+ >>> RangeSet("10-19 30-34").subtract(RangeSet("22-28"))
+ <RangeSet("10-19 30-34")>
+ """
out = []
z = 0
@@ -150,7 +174,13 @@
def overlaps(self, other):
"""Returns true if the argument has a nonempty overlap with this
- RangeSet."""
+ RangeSet.
+
+ >>> RangeSet("10-19 30-34").overlaps(RangeSet("18-32"))
+ True
+ >>> RangeSet("10-19 30-34").overlaps(RangeSet("22-28"))
+ False
+ """
# This is like intersect, but we can stop as soon as we discover the
# output is going to be nonempty.
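union, intersect, subtract and overlaps all use the same sweep: merge the two boundary-point sequences, tag each point with +1/-1, and emit a point whenever the running depth crosses the relevant threshold (1 for union, 2 for intersect). A standalone union over boundary-point tuples, with a final dedup standing in for _remove_pairs (a sketch, not the real method):

    import heapq, itertools

    def union_points(a, b):
        out, depth = [], 0
        for point, delta in heapq.merge(zip(a, itertools.cycle((+1, -1))),
                                        zip(b, itertools.cycle((+1, -1)))):
            if (depth == 0 and delta == +1) or (depth == 1 and delta == -1):
                out.append(point)
            depth += delta
        deduped = []                 # merge ranges that touch exactly
        for p in out:
            if deduped and deduped[-1] == p:
                deduped.pop()
            else:
                deduped.append(p)
        return tuple(deduped)

    # "10-19 30-34" union "18-29" -> "10-34"
    print(union_points((10, 20, 30, 35), (18, 30)))          # (10, 35)
    # "10-19 30-34" union "22 32" -> "10-19 22 30-34"
    print(union_points((10, 20, 30, 35), (22, 23, 32, 33)))  # (10, 20, 22, 23, 30, 35)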
@@ -164,7 +194,11 @@
def size(self):
"""Returns the total size of the RangeSet (ie, how many integers
- are in the set)."""
+ are in the set).
+
+ >>> RangeSet("10-19 30-34").size()
+ 15
+ """
total = 0
for i, p in enumerate(self.data):
@@ -173,3 +207,37 @@
else:
total -= p
return total
+
+ def map_within(self, other):
+ """'other' should be a subset of 'self'. Returns a RangeSet
+ representing what 'other' would get translated to if the integers
+ of 'self' were translated down to be contiguous starting at zero.
+
+ >>> RangeSet("0-9").map_within(RangeSet("3-4"))
+ <RangeSet("3-4")>
+ >>> RangeSet("10-19").map_within(RangeSet("13-14"))
+ <RangeSet("3-4")>
+ >>> RangeSet("10-19 30-39").map_within(RangeSet("17-19 30-32"))
+ <RangeSet("7-12")>
+ >>> RangeSet("10-19 30-39").map_within(RangeSet("12-13 17-19 30-32"))
+ <RangeSet("2-3 7-12")>
+ """
+
+ out = []
+ offset = 0
+ start = None
+ for p, d in heapq.merge(zip(self.data, itertools.cycle((-5, +5))),
+ zip(other.data, itertools.cycle((-1, +1)))):
+ if d == -5:
+ start = p
+ elif d == +5:
+ offset += p-start
+ start = None
+ else:
+ out.append(offset + p - start)
+ return RangeSet(data=out)
+
+
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
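map_within is what lets WriteTransfers express a stashed range in the transfer's own coordinates (the sr = xf.src_ranges.map_within(sr) step above): the stash is recorded in absolute block numbers, but the version-2 command needs it relative to the blocks this transfer reads. With tools/releasetools on the Python path the translation can be checked directly; the absolute ranges here are made-up example values.

    from rangelib import RangeSet

    src = RangeSet("10-19 30-39")      # the transfer reads 20 blocks
    stashed = RangeSet("17-19 30-32")  # a stash, in absolute block numbers

    # Relative to the 20 blocks read, the stash covers blocks 7-12.
    print(src.map_within(stashed))     # <RangeSet("7-12")>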
diff --git a/tools/releasetools/sign_target_files_apks b/tools/releasetools/sign_target_files_apks
index 075c925..931acb8 100755
--- a/tools/releasetools/sign_target_files_apks
+++ b/tools/releasetools/sign_target_files_apks
@@ -198,6 +198,7 @@
print "NOT signing: %s" % (name,)
output_tf_zip.writestr(out_info, data)
elif info.filename in ("SYSTEM/build.prop",
+ "VENDOR/build.prop",
"RECOVERY/RAMDISK/default.prop"):
print "rewriting %s:" % (info.filename,)
new_data = RewriteProps(data, misc_info)
@@ -295,12 +296,12 @@
original_line = line
if line and line[0] != '#' and "=" in line:
key, value = line.split("=", 1)
- if (key == "ro.build.fingerprint"
+ if (key in ("ro.build.fingerprint", "ro.vendor.build.fingerprint")
and misc_info.get("oem_fingerprint_properties") is None):
pieces = value.split("/")
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
- elif (key == "ro.build.thumbprint"
+ elif (key in ("ro.build.thumbprint", "ro.vendor.build.thumbprint")
and misc_info.get("oem_fingerprint_properties") is not None):
pieces = value.split("/")
pieces[-1] = EditTags(pieces[-1])
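For the fingerprint and thumbprint keys handled above, only the last '/'-separated element (the build tags) is passed through EditTags. A sketch of that split-edit-rejoin step; edit_tags here is a hypothetical stand-in that swaps test-keys for release-keys, whereas the real EditTags rewrites the tag list according to the tool's options, and the fingerprint value is illustrative.

    def edit_tags(tags):
        # Hypothetical stand-in for EditTags.
        return ",".join("release-keys" if t == "test-keys" else t
                        for t in tags.split(","))

    value = "google/hammerhead/hammerhead:5.1/LMY47D/1743759:user/test-keys"
    pieces = value.split("/")
    pieces[-1] = edit_tags(pieces[-1])
    print("/".join(pieces))
    # google/hammerhead/hammerhead:5.1/LMY47D/1743759:user/release-keys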