am 29eafdea: AI 149251: Add 1.x Icon Guidelines doc and template pack. BUG=1790234
Merge commit '29eafdea34a5239b25fe82e5cd6debf1a5157c6b' into donut
* commit '29eafdea34a5239b25fe82e5cd6debf1a5157c6b':
AI 149251: Add 1.x Icon Guidelines doc and template pack.
diff --git a/cleanspec.mk b/cleanspec.mk
index 22d9fe1..14c8016 100644
--- a/cleanspec.mk
+++ b/cleanspec.mk
@@ -75,6 +75,10 @@
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/PinyinIMEGoogleService_intermediates)
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/com.android.inputmethod.pinyin.lib_intermediates)
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/PinyinIMEGoogleService_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/framework_intermediates/src/telephony)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/product/*/obj)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/bin/tcpdump)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/framework_intermediates/src/location)
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/core/Makefile b/core/Makefile
index 58a9695..110b3c9 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -62,9 +62,6 @@
# Apps are always signed with test keys, and may be re-signed in a post-build
# step. If that happens, the "test-keys" tag will be removed by that step.
BUILD_VERSION_TAGS += test-keys
-ifndef INCLUDE_TEST_OTA_KEYS
- BUILD_VERSION_TAGS += ota-rel-keys
-endif
BUILD_VERSION_TAGS := $(subst $(space),$(comma),$(sort $(BUILD_VERSION_TAGS)))
# A human-readable string that describes this build in detail.
@@ -129,10 +126,12 @@
BUILD_NUMBER="$(BUILD_NUMBER)" \
PLATFORM_VERSION="$(PLATFORM_VERSION)" \
PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
+ PLATFORM_VERSION_CODENAME="$(PLATFORM_VERSION_CODENAME)" \
BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
TARGET_BOOTLOADER_BOARD_NAME="$(TARGET_BOOTLOADER_BOARD_NAME)" \
BUILD_FINGERPRINT="$(BUILD_FINGERPRINT)" \
TARGET_BOARD_PLATFORM="$(TARGET_BOARD_PLATFORM)" \
+ TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
bash $(BUILDINFO_SH) > $@
$(hide) if [ -f $(TARGET_DEVICE_DIR)/system.prop ]; then \
cat $(TARGET_DEVICE_DIR)/system.prop >> $@; \
@@ -271,6 +270,11 @@
INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)"
endif
+BOARD_KERNEL_BASE := $(strip $(BOARD_KERNEL_BASE))
+ifdef BOARD_KERNEL_BASE
+ INTERNAL_BOOTIMAGE_ARGS += --base $(BOARD_KERNEL_BASE)
+endif
+
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
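With BOARD_KERNEL_BASE set by a board config, the boot image is now built with an explicit load address. A hand-run sketch of what the resulting mkbootimg call might look like; only --cmdline and --base come from this change, while the base address, paths, and remaining flags are illustrative assumptions:

    mkbootimg --kernel $OUT/kernel \
              --ramdisk $OUT/ramdisk.img \
              --cmdline "$BOARD_KERNEL_CMDLINE" \
              --base 0x19200000 \
              -o $OUT/boot.img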
@@ -439,7 +443,11 @@
installed_notice_html_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
$(installed_notice_html_gz): $(target_notice_file_html_gz) | $(ACP)
$(copy-file-to-target)
+
+# if we've been run by mm, mmm, etc., don't reinstall this every time
+ifeq ($(ONE_SHOT_MAKEFILE),)
ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_gz)
+endif
# The kernel isn't really a module, so to get its module file in there, we
# make the target NOTICE files depend on this particular file too, which will
@@ -644,6 +652,26 @@
ifdef BOARD_KERNEL_CMDLINE
INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)"
endif
+ifdef BOARD_KERNEL_BASE
+ INTERNAL_RECOVERYIMAGE_ARGS += --base $(BOARD_KERNEL_BASE)
+endif
+
+# Keys authorized to sign OTA packages this build will accept. The
+# build always uses test-keys for this; release packaging tools will
+# substitute other keys for this one.
+OTA_PUBLIC_KEYS := $(SRC_TARGET_DIR)/product/security/testkey.x509.pem
+
+# Generate a file containing the keys that will be read by the
+# recovery binary.
+RECOVERY_INSTALL_OTA_KEYS := \
+ $(call intermediates-dir-for,PACKAGING,ota_keys)/keys
+DUMPKEY_JAR := $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar
+$(RECOVERY_INSTALL_OTA_KEYS): PRIVATE_OTA_PUBLIC_KEYS := $(OTA_PUBLIC_KEYS)
+$(RECOVERY_INSTALL_OTA_KEYS): $(OTA_PUBLIC_KEYS) $(DUMPKEY_JAR)
+ @echo "DumpPublicKey: $@ <= $(PRIVATE_OTA_PUBLIC_KEYS)"
+ @rm -rf $@
+ @mkdir -p $(dir $@)
+ java -jar $(DUMPKEY_JAR) $(PRIVATE_OTA_PUBLIC_KEYS) > $@
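The generated keys file is copied into the recovery ramdisk as /res/keys a few hunks below, so recovery no longer needs the keys compiled in. A hand-run equivalent of the recipe above, assuming the host dumpkey.jar location used elsewhere in this change:

    java -jar out/host/linux-x86/framework/dumpkey.jar \
        build/target/product/security/testkey.x509.pem > keys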
$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) \
$(INSTALLED_RAMDISK_TARGET) \
@@ -651,7 +679,8 @@
$(recovery_binary) \
$(recovery_initrc) $(recovery_kernel) \
$(INSTALLED_2NDBOOTLOADER_TARGET) \
- $(recovery_build_prop) $(recovery_resource_deps)
+ $(recovery_build_prop) $(recovery_resource_deps) \
+ $(RECOVERY_INSTALL_OTA_KEYS)
@echo ----- Making recovery image ------
rm -rf $(TARGET_RECOVERY_OUT)
mkdir -p $(TARGET_RECOVERY_OUT)
@@ -666,6 +695,7 @@
cp -rf $(recovery_resources_common) $(TARGET_RECOVERY_ROOT_OUT)/
$(foreach item,$(recovery_resources_private), \
cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/)
+ cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/res/keys
cat $(INSTALLED_DEFAULT_PROP_TARGET) $(recovery_build_prop) \
> $(TARGET_RECOVERY_ROOT_OUT)/default.prop
$(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) | gzip > $(recovery_ramdisk)
@@ -765,15 +795,10 @@
.PHONY: otapackage
otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
-# Keys authorized to sign OTA packages this build will accept.
-ifeq ($(INCLUDE_TEST_OTA_KEYS),true)
- OTA_PUBLIC_KEYS := \
- $(sort $(SRC_TARGET_DIR)/product/security/testkey.x509.pem $(OTA_PUBLIC_KEYS))
-endif
-
-ifeq ($(OTA_PUBLIC_KEYS),)
- $(error No OTA_PUBLIC_KEYS defined)
-endif
+# Keys authorized to sign OTA packages this build will accept. The
+# build always uses test-keys for this; release packaging tools will
+# substitute other keys for this one.
+OTA_PUBLIC_KEYS := $(SRC_TARGET_DIR)/product/security/testkey.x509.pem
# Build a keystore with the authorized keys in it.
# java/android/android/server/checkin/UpdateVerifier.java uses this.
@@ -790,21 +815,6 @@
# -import -file $$f -alias $(notdir $$f) || exit 1; \
# done
-ifdef RECOVERY_INSTALL_OTA_KEYS_INC
-# Generate a C-includable file containing the keys.
-# RECOVERY_INSTALL_OTA_KEYS_INC is defined by recovery/Android.mk.
-# *** THIS IS A TOTAL HACK; EXECUTABLES MUST NOT CHANGE BETWEEN DIFFERENT
-# PRODUCTS/BUILD TYPES. ***
-# TODO: make recovery read the keys from an external file.
-DUMPKEY_JAR := $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar
-$(RECOVERY_INSTALL_OTA_KEYS_INC): PRIVATE_OTA_PUBLIC_KEYS := $(OTA_PUBLIC_KEYS)
-$(RECOVERY_INSTALL_OTA_KEYS_INC): $(OTA_PUBLIC_KEYS) $(DUMPKEY_JAR)
- @echo "DumpPublicKey: $@ <= $(PRIVATE_OTA_PUBLIC_KEYS)"
- @rm -rf $@
- @mkdir -p $(dir $@)
- $(hide) java -jar $(DUMPKEY_JAR) $(PRIVATE_OTA_PUBLIC_KEYS) > $@
-endif
-
# -----------------------------------------------------------------
# A zip of the directories that map to the target filesystem.
# This zip can be used to create an OTA package or filesystem image
@@ -833,7 +843,7 @@
endef
built_ota_tools := \
- $(call intermediates-dir-for,EXECUTABLES,applypatch)/applypatch \
+ $(call intermediates-dir-for,EXECUTABLES,applypatch)/applypatch \
$(call intermediates-dir-for,EXECUTABLES,check_prereq)/check_prereq
$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_OTA_TOOLS := $(built_ota_tools)
@@ -903,6 +913,7 @@
@# build them.
$(hide) mkdir -p $(zip_root)/META
$(hide) $(ACP) $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
+ $(hide) echo "$(PRODUCT_OTA_PUBLIC_KEYS)" > $(zip_root)/META/otakeys.txt
@# Zip everything up, preserving symlinks
$(hide) (cd $(zip_root) && zip -qry ../$(notdir $@) .)
diff --git a/core/apicheck_msg_current.txt b/core/apicheck_msg_current.txt
index c277ecd..d723a19 100644
--- a/core/apicheck_msg_current.txt
+++ b/core/apicheck_msg_current.txt
@@ -6,12 +6,11 @@
1) You can add "@hide" javadoc comments to the methods, etc. listed in the
errors above.
- 2) You can update current.xml by executing the following commands:
+ 2) You can update current.xml by executing the following command:
- p4 edit frameworks/base/api/current.xml
make update-api
- To check in the revised current.xml, you will need OWNERS approval.
+ To check in the revised current.xml, you will need approval from the android API council.
******************************
diff --git a/core/binary.mk b/core/binary.mk
index 0f35d3f..ddcdc6f 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -47,11 +47,11 @@
arm_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),arm)
normal_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),thumb)
-# Read the values from something like TARGET_arm_release_CFLAGS or
-# TARGET_thumb_debug_CFLAGS. HOST_(arm|thumb)_(release|debug)_CFLAGS
-# values aren't actually used (although they are usually empty).
-arm_objects_cflags := $($(my_prefix)$(arm_objects_mode)_$($(my_prefix)BUILD_TYPE)_CFLAGS)
-normal_objects_cflags := $($(my_prefix)$(normal_objects_mode)_$($(my_prefix)BUILD_TYPE)_CFLAGS)
+# Read the values from something like TARGET_arm_CFLAGS or
+# TARGET_thumb_CFLAGS. HOST_(arm|thumb)_CFLAGS values aren't
+# actually used (although they are usually empty).
+arm_objects_cflags := $($(my_prefix)$(arm_objects_mode)_CFLAGS)
+normal_objects_cflags := $($(my_prefix)$(normal_objects_mode)_CFLAGS)
###########################################################
## Define per-module debugging flags. Users can turn on
@@ -212,6 +212,19 @@
endif
###########################################################
+## ObjC: Compile .m files to .o
+###########################################################
+
+objc_sources := $(filter %.m,$(LOCAL_SRC_FILES))
+objc_objects := $(addprefix $(intermediates)/,$(objc_sources:.m=.o))
+
+ifneq ($(strip $(objc_objects)),)
+$(objc_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.m $(yacc_cpps) $(PRIVATE_ADDITIONAL_DEPENDENCIES)
+ $(transform-$(PRIVATE_HOST)m-to-o)
+-include $(objc_objects:%.o=%.P)
+endif
+
+###########################################################
## AS: Compile .S files to .o.
###########################################################
diff --git a/core/build_id.mk b/core/build_id.mk
index cb18bc4..9163cdf 100644
--- a/core/build_id.mk
+++ b/core/build_id.mk
@@ -23,7 +23,7 @@
# (like "TC1-RC5"). It must be a single word, and is
# capitalized by convention.
#
-BUILD_ID := CUPCAKE
+BUILD_ID := DONUT-BURGER
# DISPLAY_BUILD_NUMBER should only be set for development branches,
# If set, the BUILD_NUMBER (cl) is appended to the BUILD_ID for
diff --git a/core/combo/linux-arm.mk b/core/combo/linux-arm.mk
index fa5f67e..edd2df4 100644
--- a/core/combo/linux-arm.mk
+++ b/core/combo/linux-arm.mk
@@ -15,32 +15,31 @@
$(combo_target)NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-TARGET_arm_release_CFLAGS := -O2 \
- -fomit-frame-pointer \
- -fstrict-aliasing \
- -funswitch-loops \
- -finline-limit=300
+TARGET_arm_CFLAGS := -O2 \
+ -fomit-frame-pointer \
+ -fstrict-aliasing \
+ -funswitch-loops \
+ -finline-limit=300
-TARGET_thumb_release_CFLAGS := -mthumb \
- -Os \
- -fomit-frame-pointer \
- -fno-strict-aliasing \
- -finline-limit=64
+TARGET_thumb_CFLAGS := -mthumb \
+ -Os \
+ -fomit-frame-pointer \
+ -fno-strict-aliasing \
+ -finline-limit=64
-# When building for debug, compile everything as arm.
-TARGET_arm_debug_CFLAGS := $(TARGET_arm_release_CFLAGS) -fno-omit-frame-pointer -fno-strict-aliasing
-TARGET_thumb_debug_CFLAGS := $(TARGET_thumb_release_CFLAGS) -marm -fno-omit-frame-pointer
-
-# NOTE: if you try to build a debug build with thumb, several
+# Set FORCE_ARM_DEBUGGING to "true" in your buildspec.mk
+# or in your environment to force a full arm build, even for
+# files that are normally built as thumb; this can make
+# gdb debugging easier. Don't forget to do a clean build.
+#
+# NOTE: if you try to build a -O0 build with thumb, several
# of the libraries (libpv, libwebcore, libkjs) need to be built
# with -mlong-calls. When built at -O0, those libraries are
# too big for a thumb "BL <label>" to go from one end to the other.
-
-## As hopefully a temporary hack,
-## use this to force a full ARM build (for easier debugging in gdb)
-## (don't forget to do a clean build)
-##TARGET_arm_release_CFLAGS := $(TARGET_arm_release_CFLAGS) -fno-omit-frame-pointer
-##TARGET_thumb_release_CFLAGS := $(TARGET_thumb_release_CFLAGS) -marm -fno-omit-frame-pointer
+ifeq ($(FORCE_ARM_DEBUGGING),true)
+ TARGET_arm_CFLAGS += -fno-omit-frame-pointer
+ TARGET_thumb_CFLAGS += -marm -fno-omit-frame-pointer
+endif
## on some hosts, the target cross-compiler is not available so do not run this command
ifneq ($(wildcard $($(combo_target)CC)),)
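A minimal usage sketch of the new switch; "make clean" here stands in for whatever clean step your tree needs, since the comment above only asks for a clean build:

    export FORCE_ARM_DEBUGGING=true   # or set it in buildspec.mk
    make clean                        # don't forget the clean build
    make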
diff --git a/core/combo/select.mk b/core/combo/select.mk
index c54da22..273b660 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -7,7 +7,6 @@
# $(combo_target)OS -- standard name for this host (LINUX, DARWIN, etc.)
# $(combo_target)ARCH -- standard name for process architecture (powerpc, x86, etc.)
# $(combo_target)GLOBAL_CFLAGS -- C compiler flags to use for everything
-# $(combo_target)DEBUG_CFLAGS -- additional C compiler flags for debug builds
# $(combo_target)RELEASE_CFLAGS -- additional C compiler flags for release builds
# $(combo_target)GLOBAL_ARFLAGS -- flags to use for static linking everything
# $(combo_target)SHLIB_SUFFIX -- suffix of shared libraries
@@ -39,7 +38,6 @@
# These flags might (will) be overridden by the target makefiles
$(combo_target)GLOBAL_CFLAGS := -fno-exceptions -Wno-multichar
-$(combo_target)DEBUG_CFLAGS := -O0 -g
$(combo_target)RELEASE_CFLAGS := -O2 -g -fno-strict-aliasing
$(combo_target)GLOBAL_ARFLAGS := crs
diff --git a/core/config.mk b/core/config.mk
index 90a40a7..1ce5937 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -76,11 +76,9 @@
# These can be changed to modify both host and device modules.
COMMON_GLOBAL_CFLAGS:= -DANDROID -fmessage-length=0 -W -Wall -Wno-unused
-COMMON_DEBUG_CFLAGS:=
COMMON_RELEASE_CFLAGS:= -DNDEBUG -UDEBUG
COMMON_GLOBAL_CPPFLAGS:=
-COMMON_DEBUG_CPPFLAGS:=
COMMON_RELEASE_CPPFLAGS:=
# Set the extensions used for various packages
@@ -227,19 +225,15 @@
# ###############################################################
HOST_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
-HOST_DEBUG_CFLAGS += $(COMMON_DEBUG_CFLAGS)
HOST_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
HOST_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
-HOST_DEBUG_CPPFLAGS += $(COMMON_DEBUG_CPPFLAGS)
HOST_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
TARGET_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
-TARGET_DEBUG_CFLAGS += $(COMMON_DEBUG_CFLAGS)
TARGET_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
TARGET_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
-TARGET_DEBUG_CPPFLAGS += $(COMMON_DEBUG_CPPFLAGS)
TARGET_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
HOST_GLOBAL_LD_DIRS += -L$(HOST_OUT_INTERMEDIATE_LIBRARIES)
@@ -250,7 +244,7 @@
# Many host compilers don't support these flags, so we have to make
# sure to only specify them for the target compilers checked in to
-# the source tree. The simulator uses the target flags but the
+# the source tree. The simulator passes the target flags to the
# host compiler, so only set them for the target when the target
# is not the simulator.
ifneq ($(TARGET_SIMULATOR),true)
@@ -258,21 +252,11 @@
TARGET_GLOBAL_CPPFLAGS += $(TARGET_ERROR_FLAGS)
endif
-ifeq ($(HOST_BUILD_TYPE),release)
-HOST_GLOBAL_CFLAGS+= $(HOST_RELEASE_CFLAGS)
-HOST_GLOBAL_CPPFLAGS+= $(HOST_RELEASE_CPPFLAGS)
-else
-HOST_GLOBAL_CFLAGS+= $(HOST_DEBUG_CFLAGS)
-HOST_GLOBAL_CPPFLAGS+= $(HOST_DEBUG_CPPFLAGS)
-endif
+HOST_GLOBAL_CFLAGS += $(HOST_RELEASE_CFLAGS)
+HOST_GLOBAL_CPPFLAGS += $(HOST_RELEASE_CPPFLAGS)
-ifeq ($(TARGET_BUILD_TYPE),release)
-TARGET_GLOBAL_CFLAGS+= $(TARGET_RELEASE_CFLAGS)
-TARGET_GLOBAL_CPPFLAGS+= $(TARGET_RELEASE_CPPFLAGS)
-else
-TARGET_GLOBAL_CFLAGS+= $(TARGET_DEBUG_CFLAGS)
-TARGET_GLOBAL_CPPFLAGS+= $(TARGET_DEBUG_CPPFLAGS)
-endif
+TARGET_GLOBAL_CFLAGS += $(TARGET_RELEASE_CFLAGS)
+TARGET_GLOBAL_CPPFLAGS += $(TARGET_RELEASE_CPPFLAGS)
# TODO: do symbol compression
TARGET_COMPRESS_MODULE_SYMBOLS := false
diff --git a/core/definitions.mk b/core/definitions.mk
index 17ec646..069855d 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -794,6 +794,22 @@
endef
###########################################################
+## Commands for running gcc to compile an Objective-C file.
+## This should never happen for target builds, but if it does,
+## it will fail with an error at build time.
+###########################################################
+
+define transform-m-to-o-no-deps
+@echo "target ObjC: $(PRIVATE_MODULE) <= $<"
+$(call transform-c-or-s-to-o-no-deps)
+endef
+
+define transform-m-to-o
+$(transform-m-to-o-no-deps)
+$(hide) $(transform-d-to-p)
+endef
+
+###########################################################
## Commands for running gcc to compile a host C++ file
###########################################################
@@ -871,6 +887,20 @@
endef
###########################################################
+## Commands for running gcc to compile a host Objective-C file
+###########################################################
+
+define transform-host-m-to-o-no-deps
+@echo "host ObjC: $(PRIVATE_MODULE) <= $<"
+$(call transform-host-c-or-s-to-o-no-deps)
+endef
+
+define transform-host-m-to-o
+$(transform-host-m-to-o-no-deps)
+$(transform-d-to-p)
+endef
+
+###########################################################
## Commands for running ar
###########################################################
@@ -1122,7 +1152,11 @@
$(addprefix -P , $(PRIVATE_RESOURCE_PUBLICS_OUTPUT)) \
$(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
$(addprefix -A , $(PRIVATE_ASSET_DIR)) \
- $(addprefix -I , $(PRIVATE_AAPT_INCLUDES))
+ $(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
+ $(addprefix --min-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
+ $(addprefix --target-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
+ $(addprefix --version-code , $(PLATFORM_SDK_VERSION)) \
+ $(addprefix --version-name , $(PLATFORM_VERSION))
endef
ifeq ($(HOST_OS),windows)
@@ -1250,6 +1284,9 @@
# A list of dynamic and static parameters; build layers for
# dynamic params that lay over the static ones.
#TODO: update the manifest to point to the package file
+#Note that the version numbers are given to aapt as simple default
+#values; applications can override these by explicitly stating
+#them in their manifest.
define add-assets-to-package
$(hide) $(AAPT) package -z -u $(PRIVATE_AAPT_FLAGS) \
$(addprefix -c , $(PRODUCT_AAPT_CONFIG)) \
@@ -1257,6 +1294,10 @@
$(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
$(addprefix -A , $(PRIVATE_ASSET_DIR)) \
$(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
+ $(addprefix --min-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
+ $(addprefix --target-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
+ $(addprefix --version-code , $(PLATFORM_SDK_VERSION)) \
+ $(addprefix --version-name , $(PLATFORM_VERSION)) \
-F $@
endef
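With the version defaults defined in core/version_defaults.mk later in this change, the packaging step now passes default version information to aapt. A hedged sketch of the resulting invocation on this branch; the resource/asset paths and package name are illustrative, and the real call also carries flags not shown in this hunk:

    aapt package -z -u \
        -S res -A assets \
        -I out/target/common/obj/APPS/framework-res_intermediates/package-export.apk \
        --min-sdk-version Donut --target-sdk-version Donut \
        --version-code 3 --version-name Donut \
        -F Foo.apk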
diff --git a/core/envsetup.mk b/core/envsetup.mk
index ba93549..31901e9 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -7,6 +7,9 @@
# OUT_DIR is also set to "out" if it's not already set.
# this allows you to set it to somewhere else if you like
+# Set up version information.
+include $(BUILD_SYSTEM)/version_defaults.mk
+
# ---------------------------------------------------------------
# If you update the build system such that the environment setup
# or buildspec.mk need to be updated, increment this number, and
@@ -319,6 +322,8 @@
ifneq ($(PRINT_BUILD_CONFIG),)
$(info ============================================)
+$(info PLATFORM_VERSION_CODENAME=$(PLATFORM_VERSION_CODENAME))
+$(info PLATFORM_VERSION=$(PLATFORM_VERSION))
$(info TARGET_PRODUCT=$(TARGET_PRODUCT))
$(info TARGET_BUILD_VARIANT=$(TARGET_BUILD_VARIANT))
$(info TARGET_SIMULATOR=$(TARGET_SIMULATOR))
diff --git a/core/java.mk b/core/java.mk
index 9150a5c..5a434c4 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -188,9 +188,8 @@
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
$(findbugs_html) : $(findbugs_xml)
@mkdir -p $(dir $@)
- @echo UnionBugs: $@
- $(hide) prebuilt/common/findbugs/bin/unionBugs $(PRIVATE_XML_FILE) \
- | prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl \
+ @echo ConvertXmlToText: $@
+ $(hide) prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl $(PRIVATE_XML_FILE) \
> $@
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
diff --git a/core/main.mk b/core/main.mk
index fdf2567..2bf8102 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -85,9 +85,6 @@
$(error Directory names containing spaces not supported)
endif
-# Set up version information.
-include $(BUILD_SYSTEM)/version_defaults.mk
-
# These are the modifier targets that don't do anything themselves, but
# change the behavior of the build.
# (must be defined before including definitions.make)
@@ -301,7 +298,6 @@
dalvik/tools/dmtracedump \
dalvik/tools/hprof-conv \
development/emulator/mksdcard \
- development/tools/activitycreator \
development/tools/line_endings \
development/host \
external/expat \
@@ -410,6 +406,10 @@
# Clean up/verify variables defined by the board config file.
TARGET_BOOTLOADER_BOARD_NAME := $(strip $(TARGET_BOOTLOADER_BOARD_NAME))
+TARGET_CPU_ABI := $(strip $(TARGET_CPU_ABI))
+ifeq ($(TARGET_CPU_ABI),)
+ $(error No TARGET_CPU_ABI defined by board config: $(board_config_mk))
+endif
#
# Include all of the makefiles in the system
diff --git a/core/pathmap.mk b/core/pathmap.mk
index 13cb80d..de7c1bb 100644
--- a/core/pathmap.mk
+++ b/core/pathmap.mk
@@ -82,6 +82,7 @@
opengl \
sax \
telephony \
+ tts \
wifi \
)
diff --git a/core/prelink-linux-arm.map b/core/prelink-linux-arm.map
index 1cd2aa4..8925e22 100644
--- a/core/prelink-linux-arm.map
+++ b/core/prelink-linux-arm.map
@@ -21,6 +21,7 @@
libevent.so 0xAF800000
libssl.so 0xAF700000
libcrypto.so 0xAF500000
+libsysutils.so 0xAF400000
# bluetooth
liba2dp.so 0xAEE00000
@@ -91,19 +92,26 @@
libqcamera.so 0xA9400000
# pv libraries
-libopencorenet_support.so 0xA7D20000
-libpvasf.so 0xA7BC0000
-libpvasfreg.so 0xA7B70000
-libopencoredownload.so 0xA7B40000
-libopencoredownloadreg.so 0xA7B00000
-libopencorenet_support.so 0xA7A00000
-libopencorertsp.so 0xA7900000
-libopencorertspreg.so 0xA7800000
-libopencoreauthor.so 0xA7600000
-libopencorecommon.so 0xA7500000
-libopencoremp4.so 0xA7400000
-libopencoremp4reg.so 0xA7300000
-libopencoreplayer.so 0xA7000000
+libpvasf.so 0xA7C26000
+libpvasfreg.so 0xA7C00000
+libomx_sharedlibrary.so 0xA7BA0000
+libopencore_download.so 0xA7B40000
+libopencore_downloadreg.so 0xA7B00000
+libopencore_net_support.so 0xA7A00000
+libopencore_rtsp.so 0xA7900000
+libopencore_rtspreg.so 0xA7890000
+libopencore_author.so 0xA7800000
+libomx_aacdec_sharedlibrary.so 0xA7700000
+libomx_amrdec_sharedlibrary.so 0xA76A0000
+libomx_amrenc_sharedlibrary.so 0xA7680000
+libomx_avcdec_sharedlibrary.so 0xA7660000
+libomx_avcenc_sharedlibrary.so 0xA7610000
+libomx_m4vdec_sharedlibrary.so 0xA75C0000
+libomx_m4venc_sharedlibrary.so 0xA7590000
+libomx_mp3dec_sharedlibrary.so 0xA7450000
+libopencore_mp4local.so 0xA7400000
+libopencore_mp4localreg.so 0xA7300000
+libopencore_player.so 0xA7000000
# opencore hardware support
libmm-adspsvc.so 0xA6FFD000
@@ -113,6 +121,10 @@
libOmxVidEnc.so 0xA6F60000
libopencorehw.so 0xA6F50000
+# pv libraries
+libopencore_common.so 0xA6000000
+libqcomm_omx.so 0xA5A00000
+
# libraries for specific apps or temporary libraries
libcam_ipl.so 0x9F000000
libwbxml.so 0x9E800000
diff --git a/core/product_config.mk b/core/product_config.mk
index 64488d8..7cfa5f4 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -110,11 +110,11 @@
TARGET_BUILD_VARIANT := $(word 2,$(product_goals))
# The build server wants to do make PRODUCT-dream-installclean
- # which really means TARGET_PRODUCT=dream make installclean.
+ # which really means TARGET_PRODUCT=dream make installclean.
ifneq ($(filter-out $(INTERNAL_VALID_VARIANTS),$(TARGET_BUILD_VARIANT)),)
MAKECMDGOALS := $(MAKECMDGOALS) $(TARGET_BUILD_VARIANT)
TARGET_BUILD_VARIANT := eng
- default_goal_substitution :=
+ default_goal_substitution :=
else
default_goal_substitution := $(DEFAULT_GOAL)
endif
@@ -135,7 +135,7 @@
#
# Note that modifying this will not affect the goals that make will
# attempt to build, but it's important because we inspect this value
- # in certain situations (like for "make sdk").
+ # in certain situations (like for "make sdk").
#
MAKECMDGOALS := $(patsubst $(goal_name),$(default_goal_substitution),$(MAKECMDGOALS))
@@ -185,7 +185,10 @@
# in PRODUCT_LOCALES, add them to PRODUCT_LOCALES.
extra_locales := $(filter-out $(PRODUCT_LOCALES),$(CUSTOM_LOCALES))
ifneq (,$(extra_locales))
- $(info Adding CUSTOM_LOCALES [$(extra_locales)] to PRODUCT_LOCALES [$(PRODUCT_LOCALES)])
+ ifneq ($(CALLED_FROM_SETUP),true)
+ # Don't spam stdout, because envsetup.sh may be scraping values from it.
+ $(info Adding CUSTOM_LOCALES [$(extra_locales)] to PRODUCT_LOCALES [$(PRODUCT_LOCALES)])
+ endif
PRODUCT_LOCALES += $(extra_locales)
extra_locales :=
endif
@@ -202,7 +205,7 @@
PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_MODEL))
ifndef PRODUCT_MODEL
- PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_NAME))
+ PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_NAME))
endif
PRODUCT_MANUFACTURER := \
@@ -245,32 +248,19 @@
$(ADDITIONAL_BUILD_PROPERTIES) \
$(PRODUCT_PROPERTY_OVERRIDES)
-# Get the list of OTA public keys for the product.
-OTA_PUBLIC_KEYS := \
- $(sort \
- $(OTA_PUBLIC_KEYS) \
- $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OTA_PUBLIC_KEYS) \
- )
-
-# HACK: Not all products define OTA keys yet, and the -user build
-# will fail if no keys are defined.
-# TODO: Let a product opt out of needing OTA keys, and stop defaulting to
-# the test key as soon as possible.
-ifeq (,$(strip $(OTA_PUBLIC_KEYS)))
- ifeq (,$(CALLED_FROM_SETUP))
- $(warning WARNING: adding test OTA key)
- endif
- OTA_PUBLIC_KEYS := $(SRC_TARGET_DIR)/product/security/testkey.x509.pem
-endif
+# The OTA key(s) specified by the product config, if any. The names
+# of these keys are stored in the target-files zip so that post-build
+# signing tools can substitute them for the test key embedded by
+# default.
+PRODUCT_OTA_PUBLIC_KEYS := $(sort \
+ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OTA_PUBLIC_KEYS))
# ---------------------------------------------------------------
-# Force the simulator to be the simulator, and make BUILD_TYPE
-# default to debug.
+# Simulator overrides
ifeq ($(TARGET_PRODUCT),sim)
+ # Tell the build system to turn on some special cases
+ # to deal with the simulator product.
TARGET_SIMULATOR := true
- ifeq (,$(strip $(TARGET_BUILD_TYPE)))
- TARGET_BUILD_TYPE := debug
- endif
# dexpreopt doesn't work when building the simulator
DISABLE_DEXPREOPT := true
endif
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index aed01b2..33cdf93 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -102,7 +102,7 @@
$(hide) java $(PRIVATE_JAVAOPTS) \
-classpath $(PRIVATE_CLASSPATH) \
$(PRIVATE_PARAMS) CollectAllTests $(1) \
- $(2) $(3) $(4)
+ $(2) $(3)
endef
CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core,,COMMON)
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 578d779..ca8487f 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -20,6 +20,8 @@
# Guarantees that the following are defined:
# PLATFORM_VERSION
# PLATFORM_SDK_VERSION
+# PLATFORM_VERSION_CODENAME
+# DEFAULT_APP_TARGET_SDK
# BUILD_ID
# BUILD_NUMBER
#
@@ -39,17 +41,40 @@
# which is the version that we reveal to the end user.
# Update this value when the platform version changes (rather
# than overriding it somewhere else). Can be an arbitrary string.
- PLATFORM_VERSION := 1.5
+ PLATFORM_VERSION := Donut
endif
ifeq "" "$(PLATFORM_SDK_VERSION)"
# This is the canonical definition of the SDK version, which defines
- # the set of APIs and functionality available in the platform. This is
- # a single integer, that increases monotonically as updates to the SDK
- # are released.
+ # the set of APIs and functionality available in the platform. It
+ # is a single integer that increases monotonically as updates to
+ # the SDK are released. It should only be incremented when the APIs for
+ # the new release are frozen (so that developers don't write apps against
+ # intermediate builds). During development, this number remains at the
+ # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
+ # the code-name of the new development work.
PLATFORM_SDK_VERSION := 3
endif
+ifeq "" "$(PLATFORM_VERSION_CODENAME)"
+ # If the build is not a final release build, then this is the current
+ # development code-name. If this is a final release build, it is simply "REL".
+ PLATFORM_VERSION_CODENAME := Donut
+endif
+
+ifeq "" "$(DEFAULT_APP_TARGET_SDK)"
+ # This is the default minSdkVersion and targetSdkVersion to use for
+ # all .apks created by the build system. It can be overridden by explicitly
+ # setting these in the .apk's AndroidManifest.xml. It is either the code
+ # name of the development build or, if this is a release build, the official
+ # SDK version of this release.
+ ifeq "REL" "$(PLATFORM_VERSION_CODENAME)"
+ DEFAULT_APP_TARGET_SDK := $(PLATFORM_SDK_VERSION)
+ else
+ DEFAULT_APP_TARGET_SDK := $(PLATFORM_VERSION_CODENAME)
+ endif
+endif
+
ifeq "" "$(BUILD_ID)"
# Used to signify special builds. E.g., branches and/or releases,
# like "M5-RC7". Can be an arbitrary string, but must be a single
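As a quick sanity check of the fallback above, here is a small shell sketch using this branch's default values (the authoritative logic is the make conditional in this file; this is illustrative only):

    PLATFORM_SDK_VERSION=3
    PLATFORM_VERSION_CODENAME=Donut
    if [ "$PLATFORM_VERSION_CODENAME" = "REL" ]; then
      DEFAULT_APP_TARGET_SDK=$PLATFORM_SDK_VERSION       # release build: numeric SDK level
    else
      DEFAULT_APP_TARGET_SDK=$PLATFORM_VERSION_CODENAME  # development build: codename
    fi
    echo "DEFAULT_APP_TARGET_SDK=$DEFAULT_APP_TARGET_SDK"  # prints "Donut" here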
diff --git a/envsetup.sh b/envsetup.sh
index f8f20ab..541bb11 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -289,30 +289,6 @@
#
function chooseproduct()
{
- # Find the makefiles that must exist for a product.
- # Send stderr to /dev/null in case partner isn't present.
- local -a choices
- choices=(`/bin/ls build/target/board/*/BoardConfig.mk vendor/*/*/BoardConfig.mk 2> /dev/null`)
-
- local choice
- local -a prodlist
- for choice in ${choices[@]}
- do
- # The product name is the name of the directory containing
- # the makefile we found, above.
- prodlist=(${prodlist[@]} `dirname ${choice} | xargs basename`)
- done
-
- local index=1
- local p
- echo "Product choices are:"
- for p in ${prodlist[@]}
- do
- echo " $index. $p"
- let "index = $index + 1"
- done
-
-
if [ "x$TARGET_PRODUCT" != x ] ; then
default_value=$TARGET_PRODUCT
else
@@ -327,8 +303,7 @@
local ANSWER
while [ -z "$TARGET_PRODUCT" ]
do
- echo "You can also type the name of a product if you know it."
- echo -n "Which would you like? [$default_value] "
+ echo -n "Which product would you like? [$default_value] "
if [ -z "$1" ] ; then
read ANSWER
else
@@ -338,13 +313,6 @@
if [ -z "$ANSWER" ] ; then
export TARGET_PRODUCT=$default_value
- elif (echo -n $ANSWER | grep -q -e "^[0-9][0-9]*$") ; then
- local poo=`echo -n $ANSWER`
- if [ $poo -le ${#prodlist[@]} ] ; then
- export TARGET_PRODUCT=${prodlist[$(($ANSWER-$_arrayoffset))]}
- else
- echo "** Bad product selection: $ANSWER"
- fi
else
if check_product $ANSWER
then
@@ -976,18 +944,14 @@
echo "Couldn't locate the top of the tree. Try setting TOP." >&2
return
fi
- (cd "$T" && development/tools/runtest $@)
+ (cd "$T" && development/testrunner/runtest.py $@)
}
-# simple shortcut to the runtest.py command
+# TODO: Remove this some time after 1 June 2009
function runtest_py()
{
- T=$(gettop)
- if [ ! "$T" ]; then
- echo "Couldn't locate the top of the tree. Try setting TOP." >&2
- return
- fi
- (cd "$T" && development/testrunner/runtest.py $@)
+ echo "runtest_py is obsolete; use runtest instead" >&2
+ return 1
}
function godir () {
diff --git a/target/board/generic/BoardConfig.mk b/target/board/generic/BoardConfig.mk
index a874742..6ec2de3 100644
--- a/target/board/generic/BoardConfig.mk
+++ b/target/board/generic/BoardConfig.mk
@@ -7,5 +7,6 @@
TARGET_NO_BOOTLOADER := true
TARGET_NO_KERNEL := true
TARGET_NO_RADIOIMAGE := true
+TARGET_CPU_ABI := armeabi
HAVE_HTC_AUDIO_DRIVER := true
BOARD_USES_GENERIC_AUDIO := true
diff --git a/target/board/sim/BoardConfig.mk b/target/board/sim/BoardConfig.mk
index 92679d9..491b30f 100644
--- a/target/board/sim/BoardConfig.mk
+++ b/target/board/sim/BoardConfig.mk
@@ -17,6 +17,9 @@
# Don't bother with a kernel
TARGET_NO_KERNEL := true
+# The simulator does not support native code at all
+TARGET_CPU_ABI := none
+
#the simulator partially emulates the original HTC /dev/eac audio interface
HAVE_HTC_AUDIO_DRIVER := true
BOARD_USES_GENERIC_AUDIO := true
diff --git a/target/product/core.mk b/target/product/core.mk
index d79b1e1..7c1ca00 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -12,6 +12,7 @@
Launcher \
HTMLViewer \
Phone \
+ ApplicationsProvider \
ContactsProvider \
DownloadProvider \
GoogleSearch \
diff --git a/tools/applypatch/applypatch.c b/tools/applypatch/applypatch.c
index 9954869..23b41d7 100644
--- a/tools/applypatch/applypatch.c
+++ b/tools/applypatch/applypatch.c
@@ -43,6 +43,7 @@
if (f == NULL) {
fprintf(stderr, "failed to open \"%s\": %s\n", filename, strerror(errno));
free(file->data);
+ file->data = NULL;
return -1;
}
@@ -51,6 +52,7 @@
fprintf(stderr, "short read of \"%s\" (%d bytes of %d)\n",
filename, bytes_read, file->size);
free(file->data);
+ file->data = NULL;
return -1;
}
fclose(f);
@@ -226,14 +228,16 @@
// replacement for it) and idempotent (it's okay to run this program
// multiple times).
//
-// - if the sha1 hash of <file> is <tgt-sha1>, does nothing and exits
+// - if the sha1 hash of <tgt-file> is <tgt-sha1>, does nothing and exits
// successfully.
//
-// - otherwise, if the sha1 hash of <file> is <src-sha1>, applies the
-// bsdiff <patch> to <file> to produce a new file (the type of patch
+// - otherwise, if the sha1 hash of <src-file> is <src-sha1>, applies the
+// bsdiff <patch> to <src-file> to produce a new file (the type of patch
// is automatically detected from the file header). If that new
-// file has sha1 hash <tgt-sha1>, moves it to replace <file>, and
-// exits successfully.
+// file has sha1 hash <tgt-sha1>, moves it to replace <tgt-file>, and
+// exits successfully. Note that if <src-file> and <tgt-file> are
+// not the same, <src-file> is NOT deleted on success. <tgt-file>
+// may be the string "-" to mean "the same as src-file".
//
// - otherwise, or if any error is encountered, exits with non-zero
// status.
@@ -241,7 +245,7 @@
int main(int argc, char** argv) {
if (argc < 2) {
usage:
- fprintf(stderr, "usage: %s <file> <tgt-sha1> <tgt-size> [<src-sha1>:<patch> ...]\n"
+ fprintf(stderr, "usage: %s <src-file> <tgt-file> <tgt-sha1> <tgt-size> [<src-sha1>:<patch> ...]\n"
" or %s -c <file> [<sha1> ...]\n"
" or %s -s <bytes>\n"
" or %s -l\n",
@@ -273,26 +277,31 @@
uint8_t target_sha1[SHA_DIGEST_SIZE];
const char* source_filename = argv[1];
+ const char* target_filename = argv[2];
+ if (target_filename[0] == '-' &&
+ target_filename[1] == '\0') {
+ target_filename = source_filename;
+ }
- // assume that source_filename (eg "/system/app/Foo.apk") is located
+ // assume that target_filename (eg "/system/app/Foo.apk") is located
// on the same filesystem as its top-level directory ("/system").
// We need something that exists for calling statfs().
- char* source_fs = strdup(argv[1]);
- char* slash = strchr(source_fs+1, '/');
+ char* target_fs = strdup(target_filename);
+ char* slash = strchr(target_fs+1, '/');
if (slash != NULL) {
*slash = '\0';
}
- if (ParseSha1(argv[2], target_sha1) != 0) {
- fprintf(stderr, "failed to parse tgt-sha1 \"%s\"\n", argv[2]);
+ if (ParseSha1(argv[3], target_sha1) != 0) {
+ fprintf(stderr, "failed to parse tgt-sha1 \"%s\"\n", argv[3]);
return 1;
}
- unsigned long target_size = strtoul(argv[3], NULL, 0);
+ unsigned long target_size = strtoul(argv[4], NULL, 0);
int num_patches;
Patch* patches;
- if (ParseShaArgs(argc-4, argv+4, &patches, &num_patches) < 0) { return 1; }
+ if (ParseShaArgs(argc-5, argv+5, &patches, &num_patches) < 0) { return 1; }
FileContents copy_file;
FileContents source_file;
@@ -300,15 +309,27 @@
const char* copy_patch_filename = NULL;
int made_copy = 0;
- if (LoadFileContents(source_filename, &source_file) == 0) {
+ // We try to load the target file into the source_file object.
+ if (LoadFileContents(target_filename, &source_file) == 0) {
if (memcmp(source_file.sha1, target_sha1, SHA_DIGEST_SIZE) == 0) {
// The early-exit case: the patch was already applied, this file
// has the desired hash, nothing for us to do.
fprintf(stderr, "\"%s\" is already target; no patch needed\n",
- source_filename);
+ target_filename);
return 0;
}
+ }
+ if (source_file.data == NULL ||
+ (target_filename != source_filename &&
+ strcmp(target_filename, source_filename) != 0)) {
+ // Need to load the source file: either we failed to load the
+ // target file, or we did but it's different from the source file.
+ free(source_file.data);
+ LoadFileContents(source_filename, &source_file);
+ }
+
+ if (source_file.data != NULL) {
const Patch* to_use =
FindMatchingPatch(source_file.sha1, patches, num_patches);
if (to_use != NULL) {
@@ -340,7 +361,7 @@
}
// Is there enough room in the target filesystem to hold the patched file?
- size_t free_space = FreeSpaceForFile(source_fs);
+ size_t free_space = FreeSpaceForFile(target_fs);
int enough_space = free_space > (target_size * 3 / 2); // 50% margin of error
printf("target %ld bytes; free space %ld bytes; enough %d\n",
(long)target_size, (long)free_space, enough_space);
@@ -361,8 +382,8 @@
made_copy = 1;
unlink(source_filename);
- size_t free_space = FreeSpaceForFile(source_fs);
- printf("(now %ld bytes free for source)\n", (long)free_space);
+ size_t free_space = FreeSpaceForFile(target_fs);
+ printf("(now %ld bytes free for target)\n", (long)free_space);
}
FileContents* source_to_use;
@@ -375,14 +396,14 @@
patch_filename = copy_patch_filename;
}
- // We write the decoded output to "<file>.patch".
- char* outname = (char*)malloc(strlen(source_filename) + 10);
- strcpy(outname, source_filename);
+ // We write the decoded output to "<tgt-file>.patch".
+ char* outname = (char*)malloc(strlen(target_filename) + 10);
+ strcpy(outname, target_filename);
strcat(outname, ".patch");
FILE* output = fopen(outname, "wb");
if (output == NULL) {
fprintf(stderr, "failed to patch file %s: %s\n",
- source_filename, strerror(errno));
+ target_filename, strerror(errno));
return 1;
}
@@ -441,10 +462,10 @@
return 1;
}
- // Finally, rename the .patch file to replace the original source file.
- if (rename(outname, source_filename) != 0) {
+ // Finally, rename the .patch file to replace the target file.
+ if (rename(outname, target_filename) != 0) {
fprintf(stderr, "rename of .patch to \"%s\" failed: %s\n",
- source_filename, strerror(errno));
+ target_filename, strerror(errno));
return 1;
}
diff --git a/tools/applypatch/applypatch.sh b/tools/applypatch/applypatch.sh
index 181cd5c..88f3025 100755
--- a/tools/applypatch/applypatch.sh
+++ b/tools/applypatch/applypatch.sh
@@ -24,16 +24,22 @@
# partition that WORK_DIR is located on, without the leading slash
WORK_FS=system
+# set to 0 to use a device instead
+USE_EMULATOR=1
+
# ------------------------
tmpdir=$(mktemp -d)
-emulator -wipe-data -noaudio -no-window -port $EMULATOR_PORT &
-pid_emulator=$!
+if [ "$USE_EMULATOR" == 1 ]; then
+ emulator -wipe-data -noaudio -no-window -port $EMULATOR_PORT &
+ pid_emulator=$!
+ ADB="adb -s emulator-$EMULATOR_PORT "
+else
+ ADB="adb -d "
+fi
-ADB="adb -s emulator-$EMULATOR_PORT "
-
-echo "emulator is $pid_emulator; waiting for startup"
+echo "waiting to connect to device"
$ADB wait-for-device
echo "device is available"
$ADB remount
@@ -56,7 +62,8 @@
echo
echo FAIL: $testname
echo
- kill $pid_emulator
+ [ "$open_pid" == "" ] || kill $open_pid
+ [ "$pid_emulator" == "" ] || kill $pid_emulator
exit 1
}
@@ -68,6 +75,23 @@
run_command df | awk "/$1/ {print gensub(/K/, \"\", \"g\", \$6)}"
}
+cleanup() {
+ # not necessary if we're about to kill the emulator, but nice for
+ # running on real devices or already-running emulators.
+ testname "removing test files"
+ run_command rm $WORK_DIR/bloat.dat
+ run_command rm $WORK_DIR/old.file
+ run_command rm $WORK_DIR/patch.bsdiff
+ run_command rm $WORK_DIR/applypatch
+ run_command rm $CACHE_TEMP_SOURCE
+ run_command rm /cache/bloat*.dat
+
+ [ "$pid_emulator" == "" ] || kill $pid_emulator
+
+ rm -rf $tmpdir
+}
+
+cleanup
$ADB push $ANDROID_PRODUCT_OUT/system/bin/applypatch $WORK_DIR/applypatch
@@ -146,16 +170,71 @@
fi
testname "apply bsdiff patch"
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
$ADB pull $WORK_DIR/old.file $tmpdir/patched
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
testname "reapply bsdiff patch"
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
$ADB pull $WORK_DIR/old.file $tmpdir/patched
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
+# --------------- apply patch in new location ----------------------
+
+$ADB push $DATA_DIR/old.file $WORK_DIR
+$ADB push $DATA_DIR/patch.bsdiff $WORK_DIR
+
+# Check that the partition has enough space to apply the patch without
+# copying. If it doesn't, we'll be testing the low-space condition
+# when we intend to test the not-low-space condition.
+testname "apply patch to new location (with enough space)"
+free_kb=$(free_space $WORK_FS)
+echo "${free_kb}kb free on /$WORK_FS."
+if (( free_kb * 1024 < NEW_SIZE * 3 / 2 )); then
+ echo "Not enough space on /$WORK_FS to patch test file."
+ echo
+ echo "This doesn't mean that applypatch is necessarily broken;"
+ echo "just that /$WORK_FS doesn't have enough free space to"
+ echo "properly run this test."
+ exit 1
+fi
+
+run_command rm $WORK_DIR/new.file
+run_command rm $CACHE_TEMP_SOURCE
+
+testname "apply bsdiff patch to new location"
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+$ADB pull $WORK_DIR/new.file $tmpdir/patched
+diff -q $DATA_DIR/new.file $tmpdir/patched || fail
+
+testname "reapply bsdiff patch to new location"
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+$ADB pull $WORK_DIR/new.file $tmpdir/patched
+diff -q $DATA_DIR/new.file $tmpdir/patched || fail
+
+$ADB push $DATA_DIR/old.file $CACHE_TEMP_SOURCE
+# put some junk in the old file
+run_command dd if=/dev/urandom of=$WORK_DIR/old.file count=100 bs=1024 || fail
+
+testname "apply bsdiff patch to new location with corrupted source"
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $OLD_SHA1:$WORK_DIR/patch.bsdiff $BAD1_SHA1:$WORK_DIR/foo || fail
+$ADB pull $WORK_DIR/new.file $tmpdir/patched
+diff -q $DATA_DIR/new.file $tmpdir/patched || fail
+
+# put some junk in the cache copy, too
+run_command dd if=/dev/urandom of=$CACHE_TEMP_SOURCE count=100 bs=1024 || fail
+
+run_command rm $WORK_DIR/new.file
+testname "apply bsdiff patch to new location with corrupted source and copy (no new file)"
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $OLD_SHA1:$WORK_DIR/patch.bsdiff $BAD1_SHA1:$WORK_DIR/foo && fail
+
+# put some junk in the new file
+run_command dd if=/dev/urandom of=$WORK_DIR/new.file count=100 bs=1024 || fail
+
+testname "apply bsdiff patch to new location with corrupted source and copy (bad new file)"
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $OLD_SHA1:$WORK_DIR/patch.bsdiff $BAD1_SHA1:$WORK_DIR/foo && fail
+
# --------------- apply patch with low space on /system ----------------------
$ADB push $DATA_DIR/old.file $WORK_DIR
@@ -169,12 +248,12 @@
echo "${free_kb}kb free on /$WORK_FS now."
testname "apply bsdiff patch with low space"
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
$ADB pull $WORK_DIR/old.file $tmpdir/patched
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
testname "reapply bsdiff patch with low space"
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
$ADB pull $WORK_DIR/old.file $tmpdir/patched
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
@@ -213,7 +292,7 @@
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
# should fail; not enough files can be deleted
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff && fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff && fail
run_command ls /cache/bloat_large.dat || fail # wasn't deleted because it was open
run_command ls /cache/subdir/a.file || fail # wasn't deleted because it's in a subdir
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
@@ -229,7 +308,7 @@
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
# should succeed
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
$ADB pull $WORK_DIR/old.file $tmpdir/patched
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
run_command ls /cache/subdir/a.file || fail # still wasn't deleted because it's in a subdir
@@ -242,7 +321,7 @@
run_command dd if=/dev/urandom of=$WORK_DIR/old.file count=100 bs=1024 || fail
testname "apply bsdiff patch from cache (corrupted source) with low space"
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
$ADB pull $WORK_DIR/old.file $tmpdir/patched
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
@@ -251,20 +330,14 @@
run_command rm $WORK_DIR/old.file
testname "apply bsdiff patch from cache (missing source) with low space"
-run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
+run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
$ADB pull $WORK_DIR/old.file $tmpdir/patched
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
# --------------- cleanup ----------------------
-# not necessary if we're about to kill the emulator, but nice for
-# running on real devices or already-running emulators.
-run_command rm /cache/bloat*.dat $WORK_DIR/bloat.dat $CACHE_TEMP_SOURCE $WORK_DIR/old.file $WORK_DIR/patch.xdelta3 $WORK_DIR/patch.bsdiff $WORK_DIR/applypatch
-
-kill $pid_emulator
-
-rm -rf $tmpdir
+cleanup
echo
echo PASS
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 4e99bf5..5c738a2 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -7,6 +7,7 @@
echo "ro.build.display.id=$BUILD_DISPLAY_ID"
echo "ro.build.version.incremental=$BUILD_NUMBER"
echo "ro.build.version.sdk=$PLATFORM_SDK_VERSION"
+echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
echo "ro.build.version.release=$PLATFORM_VERSION"
echo "ro.build.date=`date`"
echo "ro.build.date.utc=`date +%s`"
@@ -19,6 +20,7 @@
echo "ro.product.name=$PRODUCT_NAME"
echo "ro.product.device=$TARGET_DEVICE"
echo "ro.product.board=$TARGET_BOOTLOADER_BOARD_NAME"
+echo "ro.product.cpu.abi=$TARGET_CPU_ABI"
echo "ro.product.manufacturer=$PRODUCT_MANUFACTURER"
echo "ro.product.locale.language=$PRODUCT_DEFAULT_LANGUAGE"
echo "ro.product.locale.region=$PRODUCT_DEFAULT_REGION"
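On a device or emulator built with these defaults, the new properties can be checked directly; the values assume the generic board config and the version defaults elsewhere in this change:

    adb shell getprop ro.build.version.codename   # -> Donut
    adb shell getprop ro.product.cpu.abi          # -> armeabi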
diff --git a/tools/droiddoc/templates-pdk/customization.cs b/tools/droiddoc/templates-pdk/customization.cs
index 01b6e96..563af1e 100644
--- a/tools/droiddoc/templates-pdk/customization.cs
+++ b/tools/droiddoc/templates-pdk/customization.cs
@@ -5,7 +5,7 @@
def:custom_masthead() ?>
<div id="header">
<div id="headerLeft">
- <a href="<?cs var:toroot ?>index.html" tabindex="-1"><img
+ <a href="<?cs var:toroot ?>guide/index.html" tabindex="-1"><img
src="<?cs var:toroot ?>assets/images/open_source.png" alt="Open Source Project: Platform Development Kit" /></a>
<ul class="<?cs
if:reference ?> <?cs
@@ -15,7 +15,7 @@
elif:community ?> <?cs
elif:publish ?> <?cs
elif:about ?> <?cs /if ?>">
- <li id="guide-link"><a href="<?cs var:toroot ?>index.html"
+ <li id="guide-link"><a href="<?cs var:toroot ?>guide/index.html"
onClick="return loadLast('guide')"><span>Porting Guide</span></a></li>
<li id="opensource-link"><a href="http://source.android.com/"
onClick="return loadLast('open')"><span>Open Source</span></a></li>
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 705ed84..51a6d8f 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import errno
import getopt
import getpass
import os
@@ -28,6 +29,7 @@
class Options(object): pass
OPTIONS = Options()
OPTIONS.signapk_jar = "out/host/linux-x86/framework/signapk.jar"
+OPTIONS.dumpkey_jar = "out/host/linux-x86/framework/dumpkey.jar"
OPTIONS.max_image_size = {}
OPTIONS.verbose = False
OPTIONS.tempfiles = []
@@ -131,9 +133,16 @@
those which require them. Return a {key: password} dict. password
will be None if the key has no password."""
- key_passwords = {}
+ no_passwords = []
+ need_passwords = []
devnull = open("/dev/null", "w+b")
for k in sorted(keylist):
+ # An empty-string key is used to mean don't re-sign this package.
+ # Obviously we don't need a password for this non-key.
+ if not k:
+ no_passwords.append(k)
+ continue
+
p = subprocess.Popen(["openssl", "pkcs8", "-in", k+".pk8",
"-inform", "DER", "-nocrypt"],
stdin=devnull.fileno(),
@@ -141,12 +150,13 @@
stderr=subprocess.STDOUT)
p.communicate()
if p.returncode == 0:
- print "%s.pk8 does not require a password" % (k,)
- key_passwords[k] = None
+ no_passwords.append(k)
else:
- key_passwords[k] = getpass.getpass("Enter password for %s.pk8> " % (k,))
+ need_passwords.append(k)
devnull.close()
- print
+
+ key_passwords = PasswordManager().GetPasswords(need_passwords)
+ key_passwords.update(dict.fromkeys(no_passwords, None))
return key_passwords
@@ -271,3 +281,102 @@
shutil.rmtree(i)
else:
os.remove(i)
+
+
+class PasswordManager(object):
+ def __init__(self):
+ self.editor = os.getenv("EDITOR", None)
+ self.pwfile = os.getenv("ANDROID_PW_FILE", None)
+
+ def GetPasswords(self, items):
+ """Get passwords corresponding to each string in 'items',
+ returning a dict. (The dict may have keys in addition to the
+ values in 'items'.)
+
+ Uses the passwords in $ANDROID_PW_FILE if available, letting the
+ user edit that file to add more needed passwords. If no editor is
+    available, or $ANDROID_PW_FILE isn't defined, prompts the user
+ interactively in the ordinary way.
+ """
+
+ current = self.ReadFile()
+
+ first = True
+ while True:
+ missing = []
+ for i in items:
+ if i not in current or not current[i]:
+ missing.append(i)
+ # Are all the passwords already in the file?
+ if not missing: return current
+
+ for i in missing:
+ current[i] = ""
+
+ if not first:
+ print "key file %s still missing some passwords." % (self.pwfile,)
+ answer = raw_input("try to edit again? [y]> ").strip()
+ if answer and answer[0] not in 'yY':
+ raise RuntimeError("key passwords unavailable")
+ first = False
+
+ current = self.UpdateAndReadFile(current)
+
+ def PromptResult(self, current):
+ """Prompt the user to enter a value (password) for each key in
+    'current' whose value is false. Returns a new dict with all the
+ values.
+ """
+ result = {}
+ for k, v in sorted(current.iteritems()):
+ if v:
+ result[k] = v
+ else:
+ while True:
+ result[k] = getpass.getpass("Enter password for %s key> "
+ % (k,)).strip()
+ if result[k]: break
+ return result
+
+ def UpdateAndReadFile(self, current):
+ if not self.editor or not self.pwfile:
+ return self.PromptResult(current)
+
+ f = open(self.pwfile, "w")
+ os.chmod(self.pwfile, 0600)
+ f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
+ f.write("# (Additional spaces are harmless.)\n\n")
+
+ first_line = None
+ sorted = [(not v, k, v) for (k, v) in current.iteritems()]
+ sorted.sort()
+ for i, (_, k, v) in enumerate(sorted):
+ f.write("[[[ %s ]]] %s\n" % (v, k))
+ if not v and first_line is None:
+ # position cursor on first line with no password.
+ first_line = i + 4
+ f.close()
+
+ p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
+ _, _ = p.communicate()
+
+ return self.ReadFile()
+
+ def ReadFile(self):
+ result = {}
+ if self.pwfile is None: return result
+ try:
+ f = open(self.pwfile, "r")
+ for line in f:
+ line = line.strip()
+ if not line or line[0] == '#': continue
+ m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
+ if not m:
+ print "failed to parse password file: ", line
+ else:
+ result[m.group(2)] = m.group(1)
+ f.close()
+ except IOError, e:
+ if e.errno != errno.ENOENT:
+ print "error reading password file: ", str(e)
+ return result
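A minimal sketch of the password-file flow this class implements; the key names and passwords are purely illustrative, and only the "[[[ password ]]] key-name" line format comes from the code above (normally the file is created and edited via $EDITOR rather than by hand):

    export ANDROID_PW_FILE=~/.android_pw
    export EDITOR=vi
    # same "[[[ password ]]] key" format the class writes and parses:
    printf '[[[ %s ]]] %s\n' secret1 build/target/product/security/releasekey  > "$ANDROID_PW_FILE"
    printf '[[[ %s ]]] %s\n' secret2 build/target/product/security/platform   >> "$ANDROID_PW_FILE"
    chmod 600 "$ANDROID_PW_FILE"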
diff --git a/tools/releasetools/ota_from_target_files b/tools/releasetools/ota_from_target_files
index dbac03d..364f751 100755
--- a/tools/releasetools/ota_from_target_files
+++ b/tools/releasetools/ota_from_target_files
@@ -33,6 +33,18 @@
Generate an incremental OTA using the given target-files zip as
the starting build.
+ -w (--wipe_user_data)
+ Generate an OTA package that will wipe the user data partition
+ when installed.
+
+ -n (--no_prereq)
+ Omit the timestamp prereq check normally included at the top of
+ the build scripts (used for developer OTA packages which
+ legitimately need to go back and forth).
+
+ -e (--extra_script) <file>
+ Insert the contents of file at the end of the update script.
+
"""
import sys
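A hedged invocation sketch combining the new flags with the existing -k option; the file names and key path are illustrative:

    ./ota_from_target_files -n -w \
        -e extra_commands.txt \
        -k build/target/product/security/testkey \
        target_files.zip ota_update.zip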
@@ -58,6 +70,9 @@
OPTIONS.require_verbatim = set()
OPTIONS.prohibit_verbatim = set(("system/build.prop",))
OPTIONS.patch_threshold = 0.95
+OPTIONS.wipe_user_data = False
+OPTIONS.omit_prereq = False
+OPTIONS.extra_script = None
def MostPopularKey(d, default):
"""Given a dict, return the key corresponding to the largest
@@ -318,9 +333,10 @@
def WriteFullOTAPackage(input_zip, output_zip):
script = []
- ts = GetBuildProp("ro.build.date.utc", input_zip)
- script.append("run_program PACKAGE:check_prereq %s" % (ts,))
- IncludeBinary("check_prereq", input_zip, output_zip)
+ if not OPTIONS.omit_prereq:
+ ts = GetBuildProp("ro.build.date.utc", input_zip)
+ script.append("run_program PACKAGE:check_prereq %s" % (ts,))
+ IncludeBinary("check_prereq", input_zip, output_zip)
AppendAssertions(script, input_zip)
@@ -331,6 +347,9 @@
script.append("write_radio_image PACKAGE:radio.img")
script.append("show_progress 0.5 0")
+ if OPTIONS.wipe_user_data:
+ script.append("format DATA:")
+
script.append("format SYSTEM:")
script.append("copy_dir PACKAGE:system SYSTEM:")
@@ -348,6 +367,9 @@
script.append("write_raw_image PACKAGE:boot.img BOOT:")
script.append("show_progress 0.2 10")
+ if OPTIONS.extra_script is not None:
+ script.append(OPTIONS.extra_script)
+
AddScript(script, output_zip)
@@ -511,6 +533,9 @@
script.append("\n# ---- start making changes here\n")
+ if OPTIONS.wipe_user_data:
+ script.append("format DATA:")
+
DeleteFiles(script, [SubstituteRoot(i[0]) for i in verbatim_targets])
if updating_boot:
@@ -543,7 +568,7 @@
script.append("show_progress %f 1" %
(next_sizes * pb_apply / total_patched_size,))
script.append(("run_program PACKAGE:applypatch "
- "/%s %s %d %s:/tmp/patchtmp/%s.p") %
+ "/%s - %s %d %s:/tmp/patchtmp/%s.p") %
(fn, tf.sha1, tf.size, sf.sha1, fn))
target_symlinks = CopySystemFiles(target_zip, None)
@@ -594,6 +619,9 @@
script.append("show_progress 0.1 5")
script.append("write_raw_image PACKAGE:boot.img BOOT:")
+ if OPTIONS.extra_script is not None:
+ script.append(OPTIONS.extra_script)
+
AddScript(script, output_zip)
@@ -602,21 +630,28 @@
def option_handler(o, a):
if o in ("-b", "--board_config"):
common.LoadBoardConfig(a)
- return True
elif o in ("-k", "--package_key"):
OPTIONS.package_key = a
- return True
elif o in ("-i", "--incremental_from"):
OPTIONS.incremental_source = a
- return True
+ elif o in ("-w", "--wipe_user_data"):
+ OPTIONS.wipe_user_data = True
+ elif o in ("-n", "--no_prereq"):
+ OPTIONS.omit_prereq = True
+ elif o in ("-e", "--extra_script"):
+ OPTIONS.extra_script = a
else:
return False
+ return True
args = common.ParseOptions(argv, __doc__,
- extra_opts="b:k:i:d:",
+ extra_opts="b:k:i:d:wne:",
extra_long_opts=["board_config=",
"package_key=",
- "incremental_from="],
+ "incremental_from=",
+ "wipe_user_data",
+ "no_prereq",
+ "extra_script="],
extra_option_handler=option_handler)
if len(args) != 2:
@@ -630,6 +665,9 @@
print " images don't exceed partition sizes."
print
+ if OPTIONS.extra_script is not None:
+ OPTIONS.extra_script = open(OPTIONS.extra_script).read()
+
print "unzipping target target-files..."
OPTIONS.input_tmp = common.UnzipTemp(args[0])
OPTIONS.target_tmp = OPTIONS.input_tmp
diff --git a/tools/releasetools/sign_target_files_apks b/tools/releasetools/sign_target_files_apks
index b632924..9f393c8 100755
--- a/tools/releasetools/sign_target_files_apks
+++ b/tools/releasetools/sign_target_files_apks
@@ -47,6 +47,20 @@
-d and -k options are added to the set of mappings in the order
in which they appear on the command line.
+
+ -o (--replace_ota_keys)
+ Replace the certificate (public key) used by OTA package
+ verification with the one specified in the input target_files
+ zip (in the META/otakeys.txt file). Key remapping (-k and -d)
+ is performed on this key.
+
+ -t (--tag_changes) <+tag>,<-tag>,...
+ Comma-separated list of changes to make to the set of tags (in
+ the last component of the build fingerprint). Prefix each with
+ '+' or '-' to indicate whether that tag should be added or
+ removed. Changes are processed in the order they appear.
+ Default value is "-test-keys,+ota-rel-keys,+release-keys".
+
"""
import sys
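
The default tag change set quoted above matches what OPTIONS.tag_changes is initialized to below. A standalone sketch (not part of the patch) of how such a spec is applied to the comma-separated tag list at the end of the fingerprint, mirroring the RewriteProps logic added later in this file:

def apply_tag_changes(tags_field, changes=("-test-keys",
                                           "+ota-rel-keys",
                                           "+release-keys")):
    tags = set(tags_field.split(","))
    for ch in changes:
        if ch[0] == "-":
            tags.discard(ch[1:])
        elif ch[0] == "+":
            tags.add(ch[1:])
    return ",".join(sorted(tags))

For example, apply_tag_changes("test-keys") returns "ota-rel-keys,release-keys".
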
@@ -55,6 +69,8 @@
print >> sys.stderr, "Python 2.4 or newer is required."
sys.exit(1)
+import cStringIO
+import copy
import os
import re
import subprocess
@@ -67,7 +83,8 @@
OPTIONS.extra_apks = {}
OPTIONS.key_map = {}
-
+OPTIONS.replace_ota_keys = False
+OPTIONS.tag_changes = ("-test-keys", "+ota-rel-keys", "+release-keys")
def GetApkCerts(tf_zip):
certmap = {}
@@ -84,6 +101,85 @@
return certmap
+def CheckAllApksSigned(input_tf_zip, apk_key_map):
+ """Check that all the APKs we want to sign have keys specified, and
+ error out if they don't."""
+ unknown_apks = []
+ for info in input_tf_zip.infolist():
+ if info.filename.endswith(".apk"):
+ name = os.path.basename(info.filename)
+ if name not in apk_key_map:
+ unknown_apks.append(name)
+ if unknown_apks:
+ print "ERROR: no key specified for:\n\n ",
+ print "\n ".join(unknown_apks)
+ print "\nUse '-e <apkname>=' to specify a key (which may be an"
+ print "empty string to not sign this apk)."
+ sys.exit(1)
+
+
+def SharedUserForApk(data):
+ tmp = tempfile.NamedTemporaryFile()
+ tmp.write(data)
+ tmp.flush()
+
+ p = common.Run(["aapt", "dump", "xmltree", tmp.name, "AndroidManifest.xml"],
+ stdout=subprocess.PIPE)
+ data, _ = p.communicate()
+ if p.returncode != 0:
+ raise ExternalError("failed to run aapt dump")
+ lines = data.split("\n")
+ for i in lines:
+ m = re.match(r'^\s*A: android:sharedUserId\([0-9a-fx]*\)="([^"]*)" .*$', i)
+ if m:
+ return m.group(1)
+ return None
+
+
+def CheckSharedUserIdsConsistent(input_tf_zip, apk_key_map):
+ """Check that all packages that request the same shared user id are
+ going to be signed with the same key."""
+
+ shared_user_apks = {}
+ maxlen = len("(unknown key)")
+
+ for info in input_tf_zip.infolist():
+ if info.filename.endswith(".apk"):
+ data = input_tf_zip.read(info.filename)
+
+ name = os.path.basename(info.filename)
+ shared_user = SharedUserForApk(data)
+ key = apk_key_map[name]
+ maxlen = max(maxlen, len(key))
+
+ if shared_user is not None:
+ shared_user_apks.setdefault(
+ shared_user, {}).setdefault(key, []).append(name)
+
+ errors = []
+ for k, v in shared_user_apks.iteritems():
+ # each shared user should have exactly one key used for all the
+ # apks that want that user.
+ if len(v) > 1:
+ errors.append((k, v))
+
+ if not errors: return
+
+ print "ERROR: shared user inconsistency. All apks wanting to use"
+ print " a given shared user must be signed with the same key."
+ print
+ errors.sort()
+ for user, keys in errors:
+ print 'shared user id "%s":' % (user,)
+ for key, apps in keys.iteritems():
+ print ' %-*s %s' % (maxlen, key or "(unknown key)", apps[0])
+ for a in apps[1:]:
+ print (' ' * (maxlen+5)) + a
+ print
+
+ sys.exit(1)
+
+
def SignApk(data, keyname, pw):
unsigned = tempfile.NamedTemporaryFile()
unsigned.write(data)
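
CheckSharedUserIdsConsistent above groups APKs by sharedUserId and rejects any shared user whose members are signed with more than one key. The core of that grouping, reduced to plain dictionaries as an illustrative sketch (apk_users and apk_keys are hypothetical inputs; in the real function they come from aapt output and the APK cert map):

def find_shared_user_conflicts(apk_users, apk_keys):
    # apk_users: apk name -> sharedUserId or None
    # apk_keys:  apk name -> signing key name
    by_user = {}
    for name, user in apk_users.items():
        if user is None:
            continue
        by_user.setdefault(user, {}).setdefault(apk_keys[name], []).append(name)
    # a shared user is consistent only if exactly one key signs all its apks
    return dict((u, keys) for u, keys in by_user.items() if len(keys) > 1)
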
@@ -100,44 +196,105 @@
return data
-def SignApks(input_tf_zip, output_tf_zip):
- apk_key_map = GetApkCerts(input_tf_zip)
-
- key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
-
+def SignApks(input_tf_zip, output_tf_zip, apk_key_map, key_passwords):
maxsize = max([len(os.path.basename(i.filename))
for i in input_tf_zip.infolist()
if i.filename.endswith('.apk')])
for info in input_tf_zip.infolist():
data = input_tf_zip.read(info.filename)
+ out_info = copy.copy(info)
if info.filename.endswith(".apk"):
name = os.path.basename(info.filename)
- key = apk_key_map.get(name, None)
- if key is not None:
- print "signing: %-*s (%s)" % (maxsize, name, key)
+ key = apk_key_map[name]
+ if key:
+ print " signing: %-*s (%s)" % (maxsize, name, key)
signed_data = SignApk(data, key, key_passwords[key])
- output_tf_zip.writestr(info, signed_data)
+ output_tf_zip.writestr(out_info, signed_data)
else:
# an APK we're not supposed to sign.
- print "skipping: %s" % (name,)
- output_tf_zip.writestr(info, data)
- elif info.filename == "SYSTEM/build.prop":
- # Change build fingerprint to reflect the fact that apps are signed.
- m = re.search(r"ro\.build\.fingerprint=.*\b(test-keys)\b.*", data)
- if not m:
- print 'WARNING: ro.build.fingerprint does not contain "test-keys"'
- else:
- data = data[:m.start(1)] + "release-keys" + data[m.end(1):]
- m = re.search(r"ro\.build\.description=.*\b(test-keys)\b.*", data)
- if not m:
- print 'WARNING: ro.build.description does not contain "test-keys"'
- else:
- data = data[:m.start(1)] + "release-keys" + data[m.end(1):]
- output_tf_zip.writestr(info, data)
+ print "NOT signing: %s" % (name,)
+ output_tf_zip.writestr(out_info, data)
+ elif info.filename in ("SYSTEM/build.prop",
+ "RECOVERY/RAMDISK/default.prop"):
+ print "rewriting %s:" % (info.filename,)
+ new_data = RewriteProps(data)
+ output_tf_zip.writestr(out_info, new_data)
else:
# a non-APK file; copy it verbatim
- output_tf_zip.writestr(info, data)
+ output_tf_zip.writestr(out_info, data)
+
+
+def RewriteProps(data):
+ output = []
+ for line in data.split("\n"):
+ line = line.strip()
+ original_line = line
+ if line and line[0] != '#':
+ key, value = line.split("=", 1)
+ if key == "ro.build.fingerprint":
+ pieces = line.split("/")
+ tags = set(pieces[-1].split(","))
+ for ch in OPTIONS.tag_changes:
+ if ch[0] == "-":
+ tags.discard(ch[1:])
+ elif ch[0] == "+":
+ tags.add(ch[1:])
+ line = "/".join(pieces[:-1] + [",".join(sorted(tags))])
+ elif key == "ro.build.description":
+ pieces = line.split(" ")
+ assert len(pieces) == 5
+ tags = set(pieces[-1].split(","))
+ for ch in OPTIONS.tag_changes:
+ if ch[0] == "-":
+ tags.discard(ch[1:])
+ elif ch[0] == "+":
+ tags.add(ch[1:])
+ line = " ".join(pieces[:-1] + [",".join(sorted(tags))])
+ if line != original_line:
+ print " replace: ", original_line
+ print " with: ", line
+ output.append(line)
+ return "\n".join(output) + "\n"
+
+
+def ReplaceOtaKeys(input_tf_zip, output_tf_zip):
+ try:
+ keylist = input_tf_zip.read("META/otakeys.txt").split()
+ except KeyError:
+ raise ExternalError("can't read META/otakeys.txt from input")
+
+ mapped_keys = []
+ for k in keylist:
+ m = re.match(r"^(.*)\.x509\.pem$", k)
+ if not m:
+ raise ExternalError("can't parse \"%s\" from META/otakeys.txt" % (k,))
+ k = m.group(1)
+ mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
+
+ print "using:\n ", "\n ".join(mapped_keys)
+ print "for OTA package verification"
+
+ # recovery uses a version of the key that has been slightly
+ # predigested (by DumpPublicKey.java) and put in res/keys.
+
+ p = common.Run(["java", "-jar", OPTIONS.dumpkey_jar] + mapped_keys,
+ stdout=subprocess.PIPE)
+ data, _ = p.communicate()
+ if p.returncode != 0:
+ raise ExternalError("failed to run dumpkeys")
+ output_tf_zip.writestr("RECOVERY/RAMDISK/res/keys", data)
+
+ # SystemUpdateActivity uses the x509.pem version of the keys, but
+ # put into a zipfile system/etc/security/otacerts.zip.
+
+ tempfile = cStringIO.StringIO()
+ certs_zip = zipfile.ZipFile(tempfile, "w")
+ for k in mapped_keys:
+ certs_zip.write(k)
+ certs_zip.close()
+ output_tf_zip.writestr("SYSTEM/etc/security/otacerts.zip",
+ tempfile.getvalue())
def main(argv):
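
ReplaceOtaKeys above writes the predigested keys for recovery and an otacerts.zip for SystemUpdateActivity. The zip-building step in isolation, as a sketch (using io.BytesIO instead of the patch's cStringIO so it also runs under Python 3; the helper name is illustrative):

import io
import zipfile

def build_otacerts_zip(cert_paths):
    buf = io.BytesIO()
    certs_zip = zipfile.ZipFile(buf, "w")
    for path in cert_paths:
        certs_zip.write(path)    # each .x509.pem stored under its on-disk name
    certs_zip.close()
    return buf.getvalue()        # bytes to pass to the output zip's writestr()
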
@@ -160,16 +317,28 @@
elif o in ("-k", "--key_mapping"):
s, d = a.split("=")
OPTIONS.key_map[s] = d
+ elif o in ("-o", "--replace_ota_keys"):
+ OPTIONS.replace_ota_keys = True
+ elif o in ("-t", "--tag_changes"):
+ new = []
+ for i in a.split(","):
+ i = i.strip()
+ if not i or i[0] not in "-+":
+ raise ValueError("Bad tag change '%s'" % (i,))
+ new.append(i[0] + i[1:].strip())
+ OPTIONS.tag_changes = tuple(new)
else:
return False
return True
args = common.ParseOptions(argv, __doc__,
- extra_opts="s:e:d:k:",
+ extra_opts="s:e:d:k:ot:",
extra_long_opts=["signapk_jar=",
"extra_apks=",
"default_key_mappings=",
- "key_mapping="],
+ "key_mapping=",
+ "replace_ota_keys",
+ "tag_changes="],
extra_option_handler=option_handler)
if len(args) != 2:
@@ -179,7 +348,15 @@
input_zip = zipfile.ZipFile(args[0], "r")
output_zip = zipfile.ZipFile(args[1], "w")
- SignApks(input_zip, output_zip)
+ apk_key_map = GetApkCerts(input_zip)
+ CheckAllApksSigned(input_zip, apk_key_map)
+ CheckSharedUserIdsConsistent(input_zip, apk_key_map)
+
+ key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
+ SignApks(input_zip, output_zip, apk_key_map, key_passwords)
+
+ if OPTIONS.replace_ota_keys:
+ ReplaceOtaKeys(input_zip, output_zip)
input_zip.close()
output_zip.close()
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index 9e3cb66..058f9ed 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -30,7 +30,8 @@
{
fprintf(stderr, "Zip alignment utility\n");
fprintf(stderr,
- "Usage: zipalign [-f] [-v] <align> infile.zip outfile.zip\n");
+ "Usage: zipalign [-f] [-v] <align> infile.zip outfile.zip\n"
+ " zipalign -c [-v] <align> infile.zip\n" );
}
/*
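
With -c, zipalign only verifies an existing archive and reports the result through its exit status. A sketch of driving that check from a Python helper (assumes a zipalign binary is on PATH; the 4-byte alignment is the conventional value, not something this patch mandates):

import subprocess

def is_aligned(zip_path, alignment=4):
    # exit status 0 means every uncompressed entry met the alignment
    return subprocess.call(["zipalign", "-c", str(alignment), zip_path]) == 0
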
@@ -152,14 +153,14 @@
pEntry = zipFile.getEntryByIndex(i);
if (pEntry->isCompressed()) {
if (verbose) {
- printf("%8ld %s (OK - compressed)\n",
+ printf("%8ld %s (OK - compressed)\n",
(long) pEntry->getFileOffset(), pEntry->getFileName());
}
} else {
long offset = pEntry->getFileOffset();
if ((offset % alignment) != 0) {
if (verbose) {
- printf("%8ld %s (BAD - %ld)\n",
+ printf("%8ld %s (BAD - %ld)\n",
(long) offset, pEntry->getFileName(),
offset % alignment);
}
@@ -185,6 +186,7 @@
int main(int argc, char* const argv[])
{
bool wantUsage = false;
+ bool check = false;
bool force = false;
bool verbose = false;
int result = 1;
@@ -204,6 +206,9 @@
while (*cp != '\0') {
switch (*cp) {
+ case 'c':
+ check = true;
+ break;
case 'f':
force = true;
break;
@@ -223,7 +228,7 @@
argv++;
}
- if (argc != 3) {
+ if (!((check && argc == 2) || (!check && argc == 3))) {
wantUsage = true;
goto bail;
}
@@ -235,12 +240,17 @@
goto bail;
}
- /* create the new archive */
- result = process(argv[1], argv[2], alignment, force);
+ if (check) {
+ /* check existing archive for correct alignment */
+ result = verify(argv[1], alignment, verbose);
+ } else {
+ /* create the new archive */
+ result = process(argv[1], argv[2], alignment, force);
- /* trust, but verify */
- if (result == 0)
- result = verify(argv[2], alignment, verbose);
+ /* trust, but verify */
+ if (result == 0)
+ result = verify(argv[2], alignment, verbose);
+ }
bail:
if (wantUsage) {
@@ -250,4 +260,3 @@
return result;
}
-